OSDN Git Service

Merge commit 'f9e3022c474619c69a46ae7dbe11b5b531dbad57' into am-0d58d39a-0539-474e...
authorEmilian Peev <epeev@google.com>
Fri, 11 May 2018 10:17:56 +0000 (03:17 -0700)
committerandroid-build-merger <android-build-merger@google.com>
Fri, 11 May 2018 10:17:56 +0000 (03:17 -0700)
am: a90677991f

Change-Id: I1ab47ca738094c00d962cf014c0f4e020a4f0ec9

95 files changed:
alsa_utils/alsa_device_profile.c
alsa_utils/alsa_device_proxy.c
alsa_utils/include/alsa_device_profile.h
alsa_utils/include/alsa_device_proxy.h
audio/Android.bp
audio/include/system/audio-base-utils.h [new file with mode: 0644]
audio/include/system/audio-base.h
audio/include/system/audio.h
audio/include/system/audio_effect-base.h
audio/include/system/audio_effect.h
audio/include/system/audio_effects/effect_dynamicsprocessing.h [new file with mode: 0644]
audio/include/system/audio_policy.h
audio/include/system/sound_trigger.h
audio_effects/include/audio_effects/effect_dynamicsprocessing.h [new file with mode: 0644]
audio_route/audio_route.c
audio_utils/channels.c
audio_utils/format.c
audio_utils/include/audio_utils/channels.h
audio_utils/include/audio_utils/clock.h
audio_utils/include/audio_utils/format.h
audio_utils/include/audio_utils/primitives.h
audio_utils/include/audio_utils/string.h [new file with mode: 0644]
audio_utils/primitives.c
audio_utils/tests/Android.bp
audio_utils/tests/build_and_run_all_unit_tests.sh
audio_utils/tests/channels_tests.cpp [new file with mode: 0644]
audio_utils/tests/fifo_multiprocess.cpp
audio_utils/tests/format_tests.cpp [new file with mode: 0644]
audio_utils/tests/primitives_benchmark.cpp [new file with mode: 0644]
audio_utils/tests/primitives_tests.cpp
audio_utils/tests/string_tests.cpp [new file with mode: 0644]
brillo/audio/audioservice/Android.mk [deleted file]
brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl [deleted file]
brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl [deleted file]
brillo/audio/audioservice/audio_daemon.cpp [deleted file]
brillo/audio/audioservice/audio_daemon.h [deleted file]
brillo/audio/audioservice/audio_daemon_handler.h [deleted file]
brillo/audio/audioservice/audio_device_handler.cpp [deleted file]
brillo/audio/audioservice/audio_device_handler.h [deleted file]
brillo/audio/audioservice/audio_service_callback.cpp [deleted file]
brillo/audio/audioservice/audio_service_callback.h [deleted file]
brillo/audio/audioservice/audio_volume_handler.cpp [deleted file]
brillo/audio/audioservice/audio_volume_handler.h [deleted file]
brillo/audio/audioservice/brillo_audio_client.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_client.h [deleted file]
brillo/audio/audioservice/brillo_audio_client_helpers.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_client_helpers.h [deleted file]
brillo/audio/audioservice/brillo_audio_device_info.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_device_info_def.h [deleted file]
brillo/audio/audioservice/brillo_audio_device_info_internal.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_device_info_internal.h [deleted file]
brillo/audio/audioservice/brillo_audio_manager.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_service.h [deleted file]
brillo/audio/audioservice/brillo_audio_service_impl.cpp [deleted file]
brillo/audio/audioservice/brillo_audio_service_impl.h [deleted file]
brillo/audio/audioservice/brilloaudioserv.rc [deleted file]
brillo/audio/audioservice/include/brillo_audio_device_info.h [deleted file]
brillo/audio/audioservice/include/brillo_audio_manager.h [deleted file]
brillo/audio/audioservice/main_audio_service.cpp [deleted file]
brillo/audio/audioservice/test/audio_daemon_mock.h [deleted file]
brillo/audio/audioservice/test/audio_daemon_test.cpp [deleted file]
brillo/audio/audioservice/test/audio_device_handler_mock.h [deleted file]
brillo/audio/audioservice/test/audio_device_handler_test.cpp [deleted file]
brillo/audio/audioservice/test/audio_service_callback_test.cpp [deleted file]
brillo/audio/audioservice/test/audio_volume_handler_mock.h [deleted file]
brillo/audio/audioservice/test/audio_volume_handler_test.cpp [deleted file]
brillo/audio/audioservice/test/brillo_audio_client_mock.h [deleted file]
brillo/audio/audioservice/test/brillo_audio_client_test.cpp [deleted file]
brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp [deleted file]
brillo/audio/audioservice/test/brillo_audio_manager_test.cpp [deleted file]
brillo/audio/audioservice/test/brillo_audio_service_mock.h [deleted file]
camera/Android.bp
camera/docs/CameraMetadataEnums.mako
camera/docs/CameraMetadataKeys.mako
camera/docs/HidlMetadata.mako [new file with mode: 0644]
camera/docs/camera_device_info.proto
camera/docs/camera_metadata_tags.mako
camera/docs/docs.html
camera/docs/html.mako
camera/docs/metadata-check-dependencies
camera/docs/metadata-generate
camera/docs/metadata-parser-sanity-check
camera/docs/metadata-validate
camera/docs/metadata_definitions.xml [moved from camera/docs/metadata_properties.xml with 89% similarity]
camera/docs/metadata_definitions.xsd [moved from camera/docs/metadata_properties.xsd with 94% similarity]
camera/docs/metadata_helpers.py
camera/docs/metadata_model.py
camera/docs/metadata_model_test.py
camera/docs/metadata_parser_xml.py
camera/docs/metadata_template.mako
camera/docs/metadata_validate.py
camera/docs/ndk_camera_metadata_tags.mako
camera/docs/ndk_metadata_properties.xml [deleted file]
camera/include/system/camera_metadata_tags.h
camera/src/camera_metadata_tag_info.c

index 734edd7..b02d3b7 100644 (file)
@@ -79,16 +79,16 @@ void profile_init(alsa_device_profile* profile, int direction)
     profile_reset(profile);
 }
 
-bool profile_is_initialized(alsa_device_profile* profile)
+bool profile_is_initialized(const alsa_device_profile* profile)
 {
     return profile->card >= 0 && profile->device >= 0;
 }
 
-bool profile_is_valid(alsa_device_profile* profile) {
+bool profile_is_valid(const alsa_device_profile* profile) {
     return profile->is_valid;
 }
 
-bool profile_is_cached_for(alsa_device_profile* profile, int card, int device) {
+bool profile_is_cached_for(const alsa_device_profile* profile, int card, int device) {
     return card == profile->card && device == profile->device;
 }
 
@@ -107,7 +107,7 @@ static unsigned int round_to_16_mult(unsigned int size)
 /*
  * Returns the system defined minimum period size based on the supplied sample rate.
  */
-unsigned profile_calc_min_period_size(alsa_device_profile* profile, unsigned sample_rate)
+unsigned profile_calc_min_period_size(const alsa_device_profile* profile, unsigned sample_rate)
 {
     ALOGV("profile_calc_min_period_size(%p, rate:%d)", profile, sample_rate);
     if (profile == NULL) {
@@ -123,7 +123,7 @@ unsigned profile_calc_min_period_size(alsa_device_profile* profile, unsigned sam
     }
 }
 
-unsigned int profile_get_period_size(alsa_device_profile* profile, unsigned sample_rate)
+unsigned int profile_get_period_size(const alsa_device_profile* profile, unsigned sample_rate)
 {
     unsigned int period_size = profile_calc_min_period_size(profile, sample_rate);
     ALOGV("profile_get_period_size(rate:%d) = %d", sample_rate, period_size);
@@ -133,7 +133,7 @@ unsigned int profile_get_period_size(alsa_device_profile* profile, unsigned samp
 /*
  * Sample Rate
  */
-unsigned profile_get_default_sample_rate(alsa_device_profile* profile)
+unsigned profile_get_default_sample_rate(const alsa_device_profile* profile)
 {
     /*
      * TODO this won't be right in general. we should store a preferred rate as we are scanning.
@@ -142,7 +142,7 @@ unsigned profile_get_default_sample_rate(alsa_device_profile* profile)
     return profile_is_valid(profile) ? profile->sample_rates[0] : DEFAULT_SAMPLE_RATE;
 }
 
-bool profile_is_sample_rate_valid(alsa_device_profile* profile, unsigned rate)
+bool profile_is_sample_rate_valid(const alsa_device_profile* profile, unsigned rate)
 {
     if (profile_is_valid(profile)) {
         size_t index;
@@ -161,7 +161,7 @@ bool profile_is_sample_rate_valid(alsa_device_profile* profile, unsigned rate)
 /*
  * Format
  */
-enum pcm_format profile_get_default_format(alsa_device_profile* profile)
+enum pcm_format profile_get_default_format(const alsa_device_profile* profile)
 {
     /*
      * TODO this won't be right in general. we should store a preferred format as we are scanning.
@@ -169,7 +169,7 @@ enum pcm_format profile_get_default_format(alsa_device_profile* profile)
     return profile_is_valid(profile) ? profile->formats[0] : DEFAULT_SAMPLE_FORMAT;
 }
 
-bool profile_is_format_valid(alsa_device_profile* profile, enum pcm_format fmt) {
+bool profile_is_format_valid(const alsa_device_profile* profile, enum pcm_format fmt) {
     if (profile_is_valid(profile)) {
         size_t index;
         for (index = 0; profile->formats[index] != PCM_FORMAT_INVALID; index++) {
@@ -187,12 +187,12 @@ bool profile_is_format_valid(alsa_device_profile* profile, enum pcm_format fmt)
 /*
  * Channels
  */
-unsigned profile_get_default_channel_count(alsa_device_profile* profile)
+unsigned profile_get_default_channel_count(const alsa_device_profile* profile)
 {
     return profile_is_valid(profile) ? profile->channel_counts[0] : DEFAULT_CHANNEL_COUNT;
 }
 
-unsigned profile_get_closest_channel_count(alsa_device_profile* profile, unsigned count)
+unsigned profile_get_closest_channel_count(const alsa_device_profile* profile, unsigned count)
 {
     if (profile_is_valid(profile)) {
         if (count < profile->min_channel_count) {
@@ -207,7 +207,7 @@ unsigned profile_get_closest_channel_count(alsa_device_profile* profile, unsigne
     }
 }
 
-bool profile_is_channel_count_valid(alsa_device_profile* profile, unsigned count)
+bool profile_is_channel_count_valid(const alsa_device_profile* profile, unsigned count)
 {
     if (profile_is_initialized(profile)) {
         return count >= profile->min_channel_count && count <= profile->max_channel_count;
@@ -216,7 +216,7 @@ bool profile_is_channel_count_valid(alsa_device_profile* profile, unsigned count
     }
 }
 
-static bool profile_test_sample_rate(alsa_device_profile* profile, unsigned rate)
+static bool profile_test_sample_rate(const alsa_device_profile* profile, unsigned rate)
 {
     struct pcm_config config = profile->default_config;
     config.rate = rate;
@@ -415,10 +415,11 @@ bool profile_read_device_info(alsa_device_profile* profile)
 
     profile->is_valid = true;
 
+    pcm_params_free(alsa_hw_params);
     return true;
 }
 
-char * profile_get_sample_rate_strs(alsa_device_profile* profile)
+char * profile_get_sample_rate_strs(const alsa_device_profile* profile)
 {
     /* if we assume that rate strings are about 5 characters (48000 is 5), plus ~1 for a
      * delimiter "|" this buffer has room for about 22 rate strings which seems like
@@ -451,7 +452,7 @@ char * profile_get_sample_rate_strs(alsa_device_profile* profile)
     return strdup(buffer);
 }
 
-char * profile_get_format_strs(alsa_device_profile* profile)
+char * profile_get_format_strs(const alsa_device_profile* profile)
 {
     /* if we assume that format strings are about 24 characters (AUDIO_FORMAT_PCM_16_BIT is 23),
      * plus ~1 for a delimiter "|" this buffer has room for about 10 format strings which seems
@@ -482,7 +483,7 @@ char * profile_get_format_strs(alsa_device_profile* profile)
     return strdup(buffer);
 }
 
-char * profile_get_channel_count_strs(alsa_device_profile* profile)
+char * profile_get_channel_count_strs(const alsa_device_profile* profile)
 {
     // FIXME implicit fixed channel count assumption here (FCC_8).
     // we use only the canonical even number channel position masks.
index b5d0490..e64a42e 100644 (file)
@@ -21,8 +21,8 @@
 #include <log/log.h>
 
 #include <errno.h>
-
 #include <stdio.h>
+#include <string.h>
 
 #include "include/alsa_device_proxy.h"
 
@@ -41,7 +41,7 @@ static const unsigned format_byte_size_map[] = {
     3, /* PCM_FORMAT_S24_3LE */
 };
 
-int proxy_prepare(alsa_device_proxy * proxy, alsa_device_profile* profile,
+int proxy_prepare(alsa_device_proxy * proxy, const alsa_device_profile* profile,
                    struct pcm_config * config)
 {
     int ret = 0;
@@ -126,7 +126,7 @@ int proxy_prepare(alsa_device_proxy * proxy, alsa_device_profile* profile,
 
 int proxy_open(alsa_device_proxy * proxy)
 {
-    alsa_device_profile* profile = proxy->profile;
+    const alsa_device_profile* profile = proxy->profile;
     ALOGV("proxy_open(card:%d device:%d %s)", profile->card, profile->device,
           profile->direction == PCM_OUT ? "PCM_OUT" : "PCM_IN");
 
@@ -262,8 +262,8 @@ void proxy_dump(const alsa_device_proxy* proxy, int fd)
     }
 }
 
-int proxy_scan_rates(alsa_device_proxy * proxy, unsigned sample_rates[]) {
-    alsa_device_profile* profile = proxy->profile;
+int proxy_scan_rates(alsa_device_proxy * proxy, const unsigned sample_rates[]) {
+    const alsa_device_profile* profile = proxy->profile;
     if (profile->card < 0 || profile->device < 0) {
         return -EINVAL;
     }
index 8307d0a..8f581d9 100644 (file)
@@ -60,34 +60,34 @@ typedef struct  {
 } alsa_device_profile;
 
 void profile_init(alsa_device_profile* profile, int direction);
-bool profile_is_initialized(alsa_device_profile* profile);
-bool profile_is_valid(alsa_device_profile* profile);
-bool profile_is_cached_for(alsa_device_profile* profile, int card, int device);
+bool profile_is_initialized(const alsa_device_profile* profile);
+bool profile_is_valid(const alsa_device_profile* profile);
+bool profile_is_cached_for(const alsa_device_profile* profile, int card, int device);
 void profile_decache(alsa_device_profile* profile);
 
 bool profile_read_device_info(alsa_device_profile* profile);
 
 /* Audio Config Strings Methods */
-char * profile_get_sample_rate_strs(alsa_device_profile* profile);
-char * profile_get_format_strs(alsa_device_profile* profile);
-char * profile_get_channel_count_strs(alsa_device_profile* profile);
+char * profile_get_sample_rate_strs(const alsa_device_profile* profile);
+char * profile_get_format_strs(const alsa_device_profile* profile);
+char * profile_get_channel_count_strs(const alsa_device_profile* profile);
 
 /* Sample Rate Methods */
-unsigned profile_get_default_sample_rate(alsa_device_profile* profile);
-bool profile_is_sample_rate_valid(alsa_device_profile* profile, unsigned rate);
+unsigned profile_get_default_sample_rate(const alsa_device_profile* profile);
+bool profile_is_sample_rate_valid(const alsa_device_profile* profile, unsigned rate);
 
 /* Format Methods */
-enum pcm_format profile_get_default_format(alsa_device_profile* profile);
-bool profile_is_format_valid(alsa_device_profile* profile, enum pcm_format fmt);
+enum pcm_format profile_get_default_format(const alsa_device_profile* profile);
+bool profile_is_format_valid(const alsa_device_profile* profile, enum pcm_format fmt);
 
 /* Channel Methods */
-unsigned profile_get_default_channel_count(alsa_device_profile* profile);
-unsigned profile_get_closest_channel_count(alsa_device_profile* profile, unsigned count);
-bool profile_is_channel_count_valid(alsa_device_profile* profile, unsigned count);
+unsigned profile_get_default_channel_count(const alsa_device_profile* profile);
+unsigned profile_get_closest_channel_count(const alsa_device_profile* profile, unsigned count);
+bool profile_is_channel_count_valid(const alsa_device_profile* profile, unsigned count);
 
 /* Utility */
-unsigned profile_calc_min_period_size(alsa_device_profile* profile, unsigned sample_rate);
-unsigned int profile_get_period_size(alsa_device_profile* profile, unsigned sample_rate);
+unsigned profile_calc_min_period_size(const alsa_device_profile* profile, unsigned sample_rate);
+unsigned int profile_get_period_size(const alsa_device_profile* profile, unsigned sample_rate);
 
 /* Debugging */
 void profile_dump(const alsa_device_profile* profile, int fd);
index 677bb5e..64565e1 100644 (file)
@@ -22,7 +22,7 @@
 #include "alsa_device_profile.h"
 
 typedef struct {
-    alsa_device_profile* profile;
+    const alsa_device_profile* profile;
 
     struct pcm_config alsa_config;
 
@@ -34,7 +34,7 @@ typedef struct {
 
 
 /* State */
-int proxy_prepare(alsa_device_proxy * proxy, alsa_device_profile * profile,
+int proxy_prepare(alsa_device_proxy * proxy, const alsa_device_profile * profile,
                    struct pcm_config * config);
 int proxy_open(alsa_device_proxy * proxy);
 void proxy_close(alsa_device_proxy * proxy);
@@ -54,7 +54,7 @@ unsigned proxy_get_latency(const alsa_device_proxy * proxy);
  * returns the index of the first rate for which the ALSA device can be opened.
  * return negative value if none work or an error occurs.
  */
-int proxy_scan_rates(alsa_device_proxy * proxy, unsigned sample_rates[]);
+int proxy_scan_rates(alsa_device_proxy * proxy, const unsigned sample_rates[]);
 
 /* I/O */
 int proxy_write(alsa_device_proxy * proxy, const void *data, unsigned int count);
index 1e9e569..68043da 100644 (file)
@@ -15,5 +15,8 @@ cc_library_headers {
         windows: {
             enabled: true,
         },
+        vendor: {
+            cflags: ["AUDIO_NO_SYSTEM_DECLARATIONS"],
+        },
     }
 }
diff --git a/audio/include/system/audio-base-utils.h b/audio/include/system/audio-base-utils.h
new file mode 100644 (file)
index 0000000..016a085
--- /dev/null
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_BASE_UTILS_H
+#define ANDROID_AUDIO_BASE_UTILS_H
+
+#include "audio-base.h"
+
+/** Define helper values to iterate over enum, extend them or checking value validity.
+ *  Those values are compatible with the O corresponding enum values.
+ *  They are not macro like similar values in audio.h to avoid conflicting
+ *  with the libhardware_legacy audio.h.
+ */
+enum {
+    /** Number of audio streams available to vendors. */
+    AUDIO_STREAM_PUBLIC_CNT = AUDIO_STREAM_ACCESSIBILITY + 1,
+
+#ifndef AUDIO_NO_SYSTEM_DECLARATIONS
+    /** Total number of streams handled by the policy. */
+    AUDIO_STREAM_FOR_POLICY_CNT= AUDIO_STREAM_REROUTING + 1,
+#endif
+
+    /** Total number of streams. */
+    AUDIO_STREAM_CNT          = AUDIO_STREAM_PATCH + 1,
+
+    AUDIO_SOURCE_MAX          = AUDIO_SOURCE_UNPROCESSED,
+    AUDIO_SOURCE_CNT          = AUDIO_SOURCE_MAX + 1,
+
+    AUDIO_MODE_MAX            = AUDIO_MODE_IN_COMMUNICATION,
+    AUDIO_MODE_CNT            = AUDIO_MODE_MAX + 1,
+
+    /** For retrocompatibility AUDIO_MODE_* and AUDIO_STREAM_* must be signed. */
+    AUDIO_DETAIL_NEGATIVE_VALUE = -1,
+};
+
+enum {
+    AUDIO_CHANNEL_OUT_ALL     = AUDIO_CHANNEL_OUT_FRONT_LEFT |
+                                AUDIO_CHANNEL_OUT_FRONT_RIGHT |
+                                AUDIO_CHANNEL_OUT_FRONT_CENTER |
+                                AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
+                                AUDIO_CHANNEL_OUT_BACK_LEFT |
+                                AUDIO_CHANNEL_OUT_BACK_RIGHT |
+                                AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER |
+                                AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER |
+                                AUDIO_CHANNEL_OUT_BACK_CENTER |
+                                AUDIO_CHANNEL_OUT_SIDE_LEFT |
+                                AUDIO_CHANNEL_OUT_SIDE_RIGHT |
+                                AUDIO_CHANNEL_OUT_TOP_CENTER |
+                                AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT |
+                                AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER |
+                                AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT |
+                                AUDIO_CHANNEL_OUT_TOP_BACK_LEFT |
+                                AUDIO_CHANNEL_OUT_TOP_BACK_CENTER |
+                                AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT |
+                                AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT |
+                                AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT,
+
+    AUDIO_CHANNEL_IN_ALL      = AUDIO_CHANNEL_IN_LEFT |
+                                AUDIO_CHANNEL_IN_RIGHT |
+                                AUDIO_CHANNEL_IN_FRONT |
+                                AUDIO_CHANNEL_IN_BACK|
+                                AUDIO_CHANNEL_IN_LEFT_PROCESSED |
+                                AUDIO_CHANNEL_IN_RIGHT_PROCESSED |
+                                AUDIO_CHANNEL_IN_FRONT_PROCESSED |
+                                AUDIO_CHANNEL_IN_BACK_PROCESSED|
+                                AUDIO_CHANNEL_IN_PRESSURE |
+                                AUDIO_CHANNEL_IN_X_AXIS |
+                                AUDIO_CHANNEL_IN_Y_AXIS |
+                                AUDIO_CHANNEL_IN_Z_AXIS |
+                                AUDIO_CHANNEL_IN_VOICE_UPLINK |
+                                AUDIO_CHANNEL_IN_VOICE_DNLINK |
+                                AUDIO_CHANNEL_IN_BACK_LEFT |
+                                AUDIO_CHANNEL_IN_BACK_RIGHT |
+                                AUDIO_CHANNEL_IN_CENTER |
+                                AUDIO_CHANNEL_IN_LOW_FREQUENCY |
+                                AUDIO_CHANNEL_IN_TOP_LEFT |
+                                AUDIO_CHANNEL_IN_TOP_RIGHT,
+
+    AUDIO_DEVICE_OUT_ALL      = AUDIO_DEVICE_OUT_EARPIECE |
+                                AUDIO_DEVICE_OUT_SPEAKER |
+                                AUDIO_DEVICE_OUT_WIRED_HEADSET |
+                                AUDIO_DEVICE_OUT_WIRED_HEADPHONE |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER |
+                                AUDIO_DEVICE_OUT_HDMI |
+                                AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET |
+                                AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET |
+                                AUDIO_DEVICE_OUT_USB_ACCESSORY |
+                                AUDIO_DEVICE_OUT_USB_DEVICE |
+                                AUDIO_DEVICE_OUT_REMOTE_SUBMIX |
+                                AUDIO_DEVICE_OUT_TELEPHONY_TX |
+                                AUDIO_DEVICE_OUT_LINE |
+                                AUDIO_DEVICE_OUT_HDMI_ARC |
+                                AUDIO_DEVICE_OUT_SPDIF |
+                                AUDIO_DEVICE_OUT_FM |
+                                AUDIO_DEVICE_OUT_AUX_LINE |
+                                AUDIO_DEVICE_OUT_SPEAKER_SAFE |
+                                AUDIO_DEVICE_OUT_IP |
+                                AUDIO_DEVICE_OUT_BUS |
+                                AUDIO_DEVICE_OUT_PROXY |
+                                AUDIO_DEVICE_OUT_USB_HEADSET |
+                                AUDIO_DEVICE_OUT_HEARING_AID |
+                                AUDIO_DEVICE_OUT_ECHO_CANCELLER |
+                                AUDIO_DEVICE_OUT_DEFAULT,
+
+    AUDIO_DEVICE_OUT_ALL_A2DP = AUDIO_DEVICE_OUT_BLUETOOTH_A2DP |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER,
+
+    AUDIO_DEVICE_OUT_ALL_SCO  = AUDIO_DEVICE_OUT_BLUETOOTH_SCO |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+                                AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+
+    AUDIO_DEVICE_OUT_ALL_USB  = AUDIO_DEVICE_OUT_USB_ACCESSORY |
+                                AUDIO_DEVICE_OUT_USB_DEVICE |
+                                AUDIO_DEVICE_OUT_USB_HEADSET,
+
+    AUDIO_DEVICE_IN_ALL       = AUDIO_DEVICE_IN_COMMUNICATION |
+                                AUDIO_DEVICE_IN_AMBIENT |
+                                AUDIO_DEVICE_IN_BUILTIN_MIC |
+                                AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET |
+                                AUDIO_DEVICE_IN_WIRED_HEADSET |
+                                AUDIO_DEVICE_IN_HDMI |
+                                AUDIO_DEVICE_IN_TELEPHONY_RX |
+                                AUDIO_DEVICE_IN_BACK_MIC |
+                                AUDIO_DEVICE_IN_REMOTE_SUBMIX |
+                                AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET |
+                                AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET |
+                                AUDIO_DEVICE_IN_USB_ACCESSORY |
+                                AUDIO_DEVICE_IN_USB_DEVICE |
+                                AUDIO_DEVICE_IN_FM_TUNER |
+                                AUDIO_DEVICE_IN_TV_TUNER |
+                                AUDIO_DEVICE_IN_LINE |
+                                AUDIO_DEVICE_IN_SPDIF |
+                                AUDIO_DEVICE_IN_BLUETOOTH_A2DP |
+                                AUDIO_DEVICE_IN_LOOPBACK |
+                                AUDIO_DEVICE_IN_IP |
+                                AUDIO_DEVICE_IN_BUS |
+                                AUDIO_DEVICE_IN_PROXY |
+                                AUDIO_DEVICE_IN_USB_HEADSET |
+                                AUDIO_DEVICE_IN_BLUETOOTH_BLE |
+                                AUDIO_DEVICE_IN_DEFAULT,
+
+    AUDIO_DEVICE_IN_ALL_SCO   = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET,
+
+    AUDIO_DEVICE_IN_ALL_USB   = AUDIO_DEVICE_IN_USB_ACCESSORY |
+                                AUDIO_DEVICE_IN_USB_DEVICE |
+                                AUDIO_DEVICE_IN_USB_HEADSET,
+
+    AUDIO_USAGE_MAX           = AUDIO_USAGE_ASSISTANT,
+    AUDIO_USAGE_CNT           = AUDIO_USAGE_ASSISTANT + 1,
+
+    AUDIO_PORT_CONFIG_ALL     = AUDIO_PORT_CONFIG_SAMPLE_RATE |
+                                AUDIO_PORT_CONFIG_CHANNEL_MASK |
+                                AUDIO_PORT_CONFIG_FORMAT |
+                                AUDIO_PORT_CONFIG_GAIN,
+}; // enum
+
+
+#endif  // ANDROID_AUDIO_BASE_UTILS_H
index 441226d..3d0638d 100644 (file)
@@ -1,9 +1,10 @@
-// This file is autogenerated by hidl-gen. Do not edit manually.
-// Source: android.hardware.audio.common@2.0
+// This file is autogenerated by hidl-gen
+// then manually edited for retrocompatibility
+// Source: android.hardware.audio.common@4.0
 // Root: android.hardware:hardware/interfaces
 
-#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V2_0_EXPORTED_CONSTANTS_H_
-#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V2_0_EXPORTED_CONSTANTS_H_
+#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V4_0_EXPORTED_CONSTANTS_H_
+#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V4_0_EXPORTED_CONSTANTS_H_
 
 #ifdef __cplusplus
 extern "C" {
@@ -30,11 +31,12 @@ typedef enum {
     AUDIO_STREAM_DTMF = 8,
     AUDIO_STREAM_TTS = 9,
     AUDIO_STREAM_ACCESSIBILITY = 10,
+#ifndef AUDIO_NO_SYSTEM_DECLARATIONS
+    /** For dynamic policy output mixes. Only used by the audio policy */
     AUDIO_STREAM_REROUTING = 11,
+    /** For audio flinger tracks volume. Only used by the audioflinger */
     AUDIO_STREAM_PATCH = 12,
-    AUDIO_STREAM_PUBLIC_CNT = 11, // (ACCESSIBILITY + 1)
-    AUDIO_STREAM_FOR_POLICY_CNT = 12, // PATCH
-    AUDIO_STREAM_CNT = 13, // (PATCH + 1)
+#endif // AUDIO_NO_SYSTEM_DECLARATIONS
 } audio_stream_type_t;
 
 typedef enum {
@@ -48,10 +50,15 @@ typedef enum {
     AUDIO_SOURCE_VOICE_COMMUNICATION = 7,
     AUDIO_SOURCE_REMOTE_SUBMIX = 8,
     AUDIO_SOURCE_UNPROCESSED = 9,
-    AUDIO_SOURCE_CNT = 10,
-    AUDIO_SOURCE_MAX = 9, // (CNT - 1)
     AUDIO_SOURCE_FM_TUNER = 1998,
+#ifndef AUDIO_NO_SYSTEM_DECLARATIONS
+    /**
+     * A low-priority, preemptible audio source for background software
+     * hotword detection. Same tuning as VOICE_RECOGNITION.
+     * Used only internally by the framework.
+     */
     AUDIO_SOURCE_HOTWORD = 1999,
+#endif // AUDIO_NO_SYSTEM_DECLARATIONS
 } audio_source_t;
 
 typedef enum {
@@ -62,90 +69,112 @@ typedef enum {
 } audio_session_t;
 
 typedef enum {
-    AUDIO_FORMAT_INVALID = 4294967295u, // 0xFFFFFFFFUL
-    AUDIO_FORMAT_DEFAULT = 0u, // 0
-    AUDIO_FORMAT_PCM = 0u, // 0x00000000UL
-    AUDIO_FORMAT_MP3 = 16777216u, // 0x01000000UL
-    AUDIO_FORMAT_AMR_NB = 33554432u, // 0x02000000UL
-    AUDIO_FORMAT_AMR_WB = 50331648u, // 0x03000000UL
-    AUDIO_FORMAT_AAC = 67108864u, // 0x04000000UL
-    AUDIO_FORMAT_HE_AAC_V1 = 83886080u, // 0x05000000UL
-    AUDIO_FORMAT_HE_AAC_V2 = 100663296u, // 0x06000000UL
-    AUDIO_FORMAT_VORBIS = 117440512u, // 0x07000000UL
-    AUDIO_FORMAT_OPUS = 134217728u, // 0x08000000UL
-    AUDIO_FORMAT_AC3 = 150994944u, // 0x09000000UL
-    AUDIO_FORMAT_E_AC3 = 167772160u, // 0x0A000000UL
-    AUDIO_FORMAT_DTS = 184549376u, // 0x0B000000UL
-    AUDIO_FORMAT_DTS_HD = 201326592u, // 0x0C000000UL
-    AUDIO_FORMAT_IEC61937 = 218103808u, // 0x0D000000UL
-    AUDIO_FORMAT_DOLBY_TRUEHD = 234881024u, // 0x0E000000UL
-    AUDIO_FORMAT_EVRC = 268435456u, // 0x10000000UL
-    AUDIO_FORMAT_EVRCB = 285212672u, // 0x11000000UL
-    AUDIO_FORMAT_EVRCWB = 301989888u, // 0x12000000UL
-    AUDIO_FORMAT_EVRCNW = 318767104u, // 0x13000000UL
-    AUDIO_FORMAT_AAC_ADIF = 335544320u, // 0x14000000UL
-    AUDIO_FORMAT_WMA = 352321536u, // 0x15000000UL
-    AUDIO_FORMAT_WMA_PRO = 369098752u, // 0x16000000UL
-    AUDIO_FORMAT_AMR_WB_PLUS = 385875968u, // 0x17000000UL
-    AUDIO_FORMAT_MP2 = 402653184u, // 0x18000000UL
-    AUDIO_FORMAT_QCELP = 419430400u, // 0x19000000UL
-    AUDIO_FORMAT_DSD = 436207616u, // 0x1A000000UL
-    AUDIO_FORMAT_FLAC = 452984832u, // 0x1B000000UL
-    AUDIO_FORMAT_ALAC = 469762048u, // 0x1C000000UL
-    AUDIO_FORMAT_APE = 486539264u, // 0x1D000000UL
-    AUDIO_FORMAT_AAC_ADTS = 503316480u, // 0x1E000000UL
-    AUDIO_FORMAT_SBC = 520093696u, // 0x1F000000UL
-    AUDIO_FORMAT_APTX = 536870912u, // 0x20000000UL
-    AUDIO_FORMAT_APTX_HD = 553648128u, // 0x21000000UL
-    AUDIO_FORMAT_AC4 = 570425344u, // 0x22000000UL
-    AUDIO_FORMAT_LDAC = 587202560u, // 0x23000000UL
-    AUDIO_FORMAT_MAIN_MASK = 4278190080u, // 0xFF000000UL
-    AUDIO_FORMAT_SUB_MASK = 16777215u, // 0x00FFFFFFUL
-    AUDIO_FORMAT_PCM_SUB_16_BIT = 1u, // 0x1
-    AUDIO_FORMAT_PCM_SUB_8_BIT = 2u, // 0x2
-    AUDIO_FORMAT_PCM_SUB_32_BIT = 3u, // 0x3
-    AUDIO_FORMAT_PCM_SUB_8_24_BIT = 4u, // 0x4
-    AUDIO_FORMAT_PCM_SUB_FLOAT = 5u, // 0x5
-    AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED = 6u, // 0x6
-    AUDIO_FORMAT_MP3_SUB_NONE = 0u, // 0x0
-    AUDIO_FORMAT_AMR_SUB_NONE = 0u, // 0x0
-    AUDIO_FORMAT_AAC_SUB_MAIN = 1u, // 0x1
-    AUDIO_FORMAT_AAC_SUB_LC = 2u, // 0x2
-    AUDIO_FORMAT_AAC_SUB_SSR = 4u, // 0x4
-    AUDIO_FORMAT_AAC_SUB_LTP = 8u, // 0x8
-    AUDIO_FORMAT_AAC_SUB_HE_V1 = 16u, // 0x10
-    AUDIO_FORMAT_AAC_SUB_SCALABLE = 32u, // 0x20
-    AUDIO_FORMAT_AAC_SUB_ERLC = 64u, // 0x40
-    AUDIO_FORMAT_AAC_SUB_LD = 128u, // 0x80
-    AUDIO_FORMAT_AAC_SUB_HE_V2 = 256u, // 0x100
-    AUDIO_FORMAT_AAC_SUB_ELD = 512u, // 0x200
-    AUDIO_FORMAT_VORBIS_SUB_NONE = 0u, // 0x0
-    AUDIO_FORMAT_PCM_16_BIT = 1u, // (PCM | PCM_SUB_16_BIT)
-    AUDIO_FORMAT_PCM_8_BIT = 2u, // (PCM | PCM_SUB_8_BIT)
-    AUDIO_FORMAT_PCM_32_BIT = 3u, // (PCM | PCM_SUB_32_BIT)
-    AUDIO_FORMAT_PCM_8_24_BIT = 4u, // (PCM | PCM_SUB_8_24_BIT)
-    AUDIO_FORMAT_PCM_FLOAT = 5u, // (PCM | PCM_SUB_FLOAT)
-    AUDIO_FORMAT_PCM_24_BIT_PACKED = 6u, // (PCM | PCM_SUB_24_BIT_PACKED)
-    AUDIO_FORMAT_AAC_MAIN = 67108865u, // (AAC | AAC_SUB_MAIN)
-    AUDIO_FORMAT_AAC_LC = 67108866u, // (AAC | AAC_SUB_LC)
-    AUDIO_FORMAT_AAC_SSR = 67108868u, // (AAC | AAC_SUB_SSR)
-    AUDIO_FORMAT_AAC_LTP = 67108872u, // (AAC | AAC_SUB_LTP)
-    AUDIO_FORMAT_AAC_HE_V1 = 67108880u, // (AAC | AAC_SUB_HE_V1)
-    AUDIO_FORMAT_AAC_SCALABLE = 67108896u, // (AAC | AAC_SUB_SCALABLE)
-    AUDIO_FORMAT_AAC_ERLC = 67108928u, // (AAC | AAC_SUB_ERLC)
-    AUDIO_FORMAT_AAC_LD = 67108992u, // (AAC | AAC_SUB_LD)
-    AUDIO_FORMAT_AAC_HE_V2 = 67109120u, // (AAC | AAC_SUB_HE_V2)
-    AUDIO_FORMAT_AAC_ELD = 67109376u, // (AAC | AAC_SUB_ELD)
-    AUDIO_FORMAT_AAC_ADTS_MAIN = 503316481u, // (AAC_ADTS | AAC_SUB_MAIN)
-    AUDIO_FORMAT_AAC_ADTS_LC = 503316482u, // (AAC_ADTS | AAC_SUB_LC)
-    AUDIO_FORMAT_AAC_ADTS_SSR = 503316484u, // (AAC_ADTS | AAC_SUB_SSR)
-    AUDIO_FORMAT_AAC_ADTS_LTP = 503316488u, // (AAC_ADTS | AAC_SUB_LTP)
-    AUDIO_FORMAT_AAC_ADTS_HE_V1 = 503316496u, // (AAC_ADTS | AAC_SUB_HE_V1)
-    AUDIO_FORMAT_AAC_ADTS_SCALABLE = 503316512u, // (AAC_ADTS | AAC_SUB_SCALABLE)
-    AUDIO_FORMAT_AAC_ADTS_ERLC = 503316544u, // (AAC_ADTS | AAC_SUB_ERLC)
-    AUDIO_FORMAT_AAC_ADTS_LD = 503316608u, // (AAC_ADTS | AAC_SUB_LD)
-    AUDIO_FORMAT_AAC_ADTS_HE_V2 = 503316736u, // (AAC_ADTS | AAC_SUB_HE_V2)
-    AUDIO_FORMAT_AAC_ADTS_ELD = 503316992u, // (AAC_ADTS | AAC_SUB_ELD)
+    AUDIO_FORMAT_INVALID             = 0xFFFFFFFFu,
+    AUDIO_FORMAT_DEFAULT             = 0,
+    AUDIO_FORMAT_PCM                 = 0x00000000u,
+    AUDIO_FORMAT_MP3                 = 0x01000000u,
+    AUDIO_FORMAT_AMR_NB              = 0x02000000u,
+    AUDIO_FORMAT_AMR_WB              = 0x03000000u,
+    AUDIO_FORMAT_AAC                 = 0x04000000u,
+    AUDIO_FORMAT_HE_AAC_V1           = 0x05000000u,
+    AUDIO_FORMAT_HE_AAC_V2           = 0x06000000u,
+    AUDIO_FORMAT_VORBIS              = 0x07000000u,
+    AUDIO_FORMAT_OPUS                = 0x08000000u,
+    AUDIO_FORMAT_AC3                 = 0x09000000u,
+    AUDIO_FORMAT_E_AC3               = 0x0A000000u,
+    AUDIO_FORMAT_DTS                 = 0x0B000000u,
+    AUDIO_FORMAT_DTS_HD              = 0x0C000000u,
+    AUDIO_FORMAT_IEC61937            = 0x0D000000u,
+    AUDIO_FORMAT_DOLBY_TRUEHD        = 0x0E000000u,
+    AUDIO_FORMAT_EVRC                = 0x10000000u,
+    AUDIO_FORMAT_EVRCB               = 0x11000000u,
+    AUDIO_FORMAT_EVRCWB              = 0x12000000u,
+    AUDIO_FORMAT_EVRCNW              = 0x13000000u,
+    AUDIO_FORMAT_AAC_ADIF            = 0x14000000u,
+    AUDIO_FORMAT_WMA                 = 0x15000000u,
+    AUDIO_FORMAT_WMA_PRO             = 0x16000000u,
+    AUDIO_FORMAT_AMR_WB_PLUS         = 0x17000000u,
+    AUDIO_FORMAT_MP2                 = 0x18000000u,
+    AUDIO_FORMAT_QCELP               = 0x19000000u,
+    AUDIO_FORMAT_DSD                 = 0x1A000000u,
+    AUDIO_FORMAT_FLAC                = 0x1B000000u,
+    AUDIO_FORMAT_ALAC                = 0x1C000000u,
+    AUDIO_FORMAT_APE                 = 0x1D000000u,
+    AUDIO_FORMAT_AAC_ADTS            = 0x1E000000u,
+    AUDIO_FORMAT_SBC                 = 0x1F000000u,
+    AUDIO_FORMAT_APTX                = 0x20000000u,
+    AUDIO_FORMAT_APTX_HD             = 0x21000000u,
+    AUDIO_FORMAT_AC4                 = 0x22000000u,
+    AUDIO_FORMAT_LDAC                = 0x23000000u,
+    AUDIO_FORMAT_MAT                 = 0x24000000u,
+    AUDIO_FORMAT_MAIN_MASK           = 0xFF000000u,
+    AUDIO_FORMAT_SUB_MASK            = 0x00FFFFFFu,
+
+    /* Subformats */
+    AUDIO_FORMAT_PCM_SUB_16_BIT        = 0x1u,
+    AUDIO_FORMAT_PCM_SUB_8_BIT         = 0x2u,
+    AUDIO_FORMAT_PCM_SUB_32_BIT        = 0x3u,
+    AUDIO_FORMAT_PCM_SUB_8_24_BIT      = 0x4u,
+    AUDIO_FORMAT_PCM_SUB_FLOAT         = 0x5u,
+    AUDIO_FORMAT_PCM_SUB_24_BIT_PACKED = 0x6u,
+
+    AUDIO_FORMAT_MP3_SUB_NONE          = 0x0u,
+
+    AUDIO_FORMAT_AMR_SUB_NONE          = 0x0u,
+
+    AUDIO_FORMAT_AAC_SUB_MAIN          = 0x1u,
+    AUDIO_FORMAT_AAC_SUB_LC            = 0x2u,
+    AUDIO_FORMAT_AAC_SUB_SSR           = 0x4u,
+    AUDIO_FORMAT_AAC_SUB_LTP           = 0x8u,
+    AUDIO_FORMAT_AAC_SUB_HE_V1         = 0x10u,
+    AUDIO_FORMAT_AAC_SUB_SCALABLE      = 0x20u,
+    AUDIO_FORMAT_AAC_SUB_ERLC          = 0x40u,
+    AUDIO_FORMAT_AAC_SUB_LD            = 0x80u,
+    AUDIO_FORMAT_AAC_SUB_HE_V2         = 0x100u,
+    AUDIO_FORMAT_AAC_SUB_ELD           = 0x200u,
+    AUDIO_FORMAT_AAC_SUB_XHE           = 0x300u,
+
+    AUDIO_FORMAT_VORBIS_SUB_NONE       = 0x0u,
+
+    AUDIO_FORMAT_E_AC3_SUB_JOC         = 0x1u,
+
+    AUDIO_FORMAT_MAT_SUB_1_0           = 0x1u,
+    AUDIO_FORMAT_MAT_SUB_2_0           = 0x2u,
+    AUDIO_FORMAT_MAT_SUB_2_1           = 0x3u,
+
+    /* Aliases */
+    AUDIO_FORMAT_PCM_16_BIT            = 0x1u,        // (PCM | PCM_SUB_16_BIT)
+    AUDIO_FORMAT_PCM_8_BIT             = 0x2u,        // (PCM | PCM_SUB_8_BIT)
+    AUDIO_FORMAT_PCM_32_BIT            = 0x3u,        // (PCM | PCM_SUB_32_BIT)
+    AUDIO_FORMAT_PCM_8_24_BIT          = 0x4u,        // (PCM | PCM_SUB_8_24_BIT)
+    AUDIO_FORMAT_PCM_FLOAT             = 0x5u,        // (PCM | PCM_SUB_FLOAT)
+    AUDIO_FORMAT_PCM_24_BIT_PACKED     = 0x6u,        // (PCM | PCM_SUB_24_BIT_PACKED)
+    AUDIO_FORMAT_AAC_MAIN              = 0x4000001u,  // (AAC | AAC_SUB_MAIN)
+    AUDIO_FORMAT_AAC_LC                = 0x4000002u,  // (AAC | AAC_SUB_LC)
+    AUDIO_FORMAT_AAC_SSR               = 0x4000004u,  // (AAC | AAC_SUB_SSR)
+    AUDIO_FORMAT_AAC_LTP               = 0x4000008u,  // (AAC | AAC_SUB_LTP)
+    AUDIO_FORMAT_AAC_HE_V1             = 0x4000010u,  // (AAC | AAC_SUB_HE_V1)
+    AUDIO_FORMAT_AAC_SCALABLE          = 0x4000020u,  // (AAC | AAC_SUB_SCALABLE)
+    AUDIO_FORMAT_AAC_ERLC              = 0x4000040u,  // (AAC | AAC_SUB_ERLC)
+    AUDIO_FORMAT_AAC_LD                = 0x4000080u,  // (AAC | AAC_SUB_LD)
+    AUDIO_FORMAT_AAC_HE_V2             = 0x4000100u,  // (AAC | AAC_SUB_HE_V2)
+    AUDIO_FORMAT_AAC_ELD               = 0x4000200u,  // (AAC | AAC_SUB_ELD)
+    AUDIO_FORMAT_AAC_XHE               = 0x4000300u,  // (AAC | AAC_SUB_XHE)
+    AUDIO_FORMAT_AAC_ADTS_MAIN         = 0x1e000001u, // (AAC_ADTS | AAC_SUB_MAIN)
+    AUDIO_FORMAT_AAC_ADTS_LC           = 0x1e000002u, // (AAC_ADTS | AAC_SUB_LC)
+    AUDIO_FORMAT_AAC_ADTS_SSR          = 0x1e000004u, // (AAC_ADTS | AAC_SUB_SSR)
+    AUDIO_FORMAT_AAC_ADTS_LTP          = 0x1e000008u, // (AAC_ADTS | AAC_SUB_LTP)
+    AUDIO_FORMAT_AAC_ADTS_HE_V1        = 0x1e000010u, // (AAC_ADTS | AAC_SUB_HE_V1)
+    AUDIO_FORMAT_AAC_ADTS_SCALABLE     = 0x1e000020u, // (AAC_ADTS | AAC_SUB_SCALABLE)
+    AUDIO_FORMAT_AAC_ADTS_ERLC         = 0x1e000040u, // (AAC_ADTS | AAC_SUB_ERLC)
+    AUDIO_FORMAT_AAC_ADTS_LD           = 0x1e000080u, // (AAC_ADTS | AAC_SUB_LD)
+    AUDIO_FORMAT_AAC_ADTS_HE_V2        = 0x1e000100u, // (AAC_ADTS | AAC_SUB_HE_V2)
+    AUDIO_FORMAT_AAC_ADTS_ELD          = 0x1e000200u, // (AAC_ADTS | AAC_SUB_ELD)
+    AUDIO_FORMAT_AAC_ADTS_XHE          = 0x1e000300u, // (AAC_ADTS | AAC_SUB_XHE)
+    AUDIO_FORMAT_E_AC3_JOC             = 0xA000001u,  // (E_AC3 | E_AC3_SUB_JOC)
+    AUDIO_FORMAT_MAT_1_0               = 0x24000001u, // (MAT | MAT_SUB_1_0)
+    AUDIO_FORMAT_MAT_2_0               = 0x24000002u, // (MAT | MAT_SUB_2_0)
+    AUDIO_FORMAT_MAT_2_1               = 0x24000003u, // (MAT | MAT_SUB_2_1)
 } audio_format_t;
 
 enum {
@@ -154,186 +183,203 @@ enum {
 };
 
 enum {
-    AUDIO_CHANNEL_REPRESENTATION_POSITION = 0u, // 0
-    AUDIO_CHANNEL_REPRESENTATION_INDEX = 2u, // 2
-    AUDIO_CHANNEL_NONE = 0u, // 0x0
-    AUDIO_CHANNEL_INVALID = 3221225472u, // 0xC0000000
-    AUDIO_CHANNEL_OUT_FRONT_LEFT = 1u, // 0x1
-    AUDIO_CHANNEL_OUT_FRONT_RIGHT = 2u, // 0x2
-    AUDIO_CHANNEL_OUT_FRONT_CENTER = 4u, // 0x4
-    AUDIO_CHANNEL_OUT_LOW_FREQUENCY = 8u, // 0x8
-    AUDIO_CHANNEL_OUT_BACK_LEFT = 16u, // 0x10
-    AUDIO_CHANNEL_OUT_BACK_RIGHT = 32u, // 0x20
-    AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 64u, // 0x40
-    AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 128u, // 0x80
-    AUDIO_CHANNEL_OUT_BACK_CENTER = 256u, // 0x100
-    AUDIO_CHANNEL_OUT_SIDE_LEFT = 512u, // 0x200
-    AUDIO_CHANNEL_OUT_SIDE_RIGHT = 1024u, // 0x400
-    AUDIO_CHANNEL_OUT_TOP_CENTER = 2048u, // 0x800
-    AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT = 4096u, // 0x1000
-    AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER = 8192u, // 0x2000
-    AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT = 16384u, // 0x4000
-    AUDIO_CHANNEL_OUT_TOP_BACK_LEFT = 32768u, // 0x8000
-    AUDIO_CHANNEL_OUT_TOP_BACK_CENTER = 65536u, // 0x10000
-    AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT = 131072u, // 0x20000
-    AUDIO_CHANNEL_OUT_MONO = 1u, // OUT_FRONT_LEFT
-    AUDIO_CHANNEL_OUT_STEREO = 3u, // (OUT_FRONT_LEFT | OUT_FRONT_RIGHT)
-    AUDIO_CHANNEL_OUT_2POINT1 = 11u, // ((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_LOW_FREQUENCY)
-    AUDIO_CHANNEL_OUT_QUAD = 51u, // (((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_BACK_LEFT) | OUT_BACK_RIGHT)
-    AUDIO_CHANNEL_OUT_QUAD_BACK = 51u, // OUT_QUAD
-    AUDIO_CHANNEL_OUT_QUAD_SIDE = 1539u, // (((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_SIDE_LEFT) | OUT_SIDE_RIGHT)
-    AUDIO_CHANNEL_OUT_SURROUND = 263u, // (((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_BACK_CENTER)
-    AUDIO_CHANNEL_OUT_PENTA = 55u, // (OUT_QUAD | OUT_FRONT_CENTER)
-    AUDIO_CHANNEL_OUT_5POINT1 = 63u, // (((((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_LOW_FREQUENCY) | OUT_BACK_LEFT) | OUT_BACK_RIGHT)
-    AUDIO_CHANNEL_OUT_5POINT1_BACK = 63u, // OUT_5POINT1
-    AUDIO_CHANNEL_OUT_5POINT1_SIDE = 1551u, // (((((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_LOW_FREQUENCY) | OUT_SIDE_LEFT) | OUT_SIDE_RIGHT)
-    AUDIO_CHANNEL_OUT_6POINT1 = 319u, // ((((((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_LOW_FREQUENCY) | OUT_BACK_LEFT) | OUT_BACK_RIGHT) | OUT_BACK_CENTER)
-    AUDIO_CHANNEL_OUT_7POINT1 = 1599u, // (((((((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_LOW_FREQUENCY) | OUT_BACK_LEFT) | OUT_BACK_RIGHT) | OUT_SIDE_LEFT) | OUT_SIDE_RIGHT)
-    AUDIO_CHANNEL_OUT_ALL = 262143u, // (((((((((((((((((OUT_FRONT_LEFT | OUT_FRONT_RIGHT) | OUT_FRONT_CENTER) | OUT_LOW_FREQUENCY) | OUT_BACK_LEFT) | OUT_BACK_RIGHT) | OUT_FRONT_LEFT_OF_CENTER) | OUT_FRONT_RIGHT_OF_CENTER) | OUT_BACK_CENTER) | OUT_SIDE_LEFT) | OUT_SIDE_RIGHT) | OUT_TOP_CENTER) | OUT_TOP_FRONT_LEFT) | OUT_TOP_FRONT_CENTER) | OUT_TOP_FRONT_RIGHT) | OUT_TOP_BACK_LEFT) | OUT_TOP_BACK_CENTER) | OUT_TOP_BACK_RIGHT)
-    AUDIO_CHANNEL_IN_LEFT = 4u, // 0x4
-    AUDIO_CHANNEL_IN_RIGHT = 8u, // 0x8
-    AUDIO_CHANNEL_IN_FRONT = 16u, // 0x10
-    AUDIO_CHANNEL_IN_BACK = 32u, // 0x20
-    AUDIO_CHANNEL_IN_LEFT_PROCESSED = 64u, // 0x40
-    AUDIO_CHANNEL_IN_RIGHT_PROCESSED = 128u, // 0x80
-    AUDIO_CHANNEL_IN_FRONT_PROCESSED = 256u, // 0x100
-    AUDIO_CHANNEL_IN_BACK_PROCESSED = 512u, // 0x200
-    AUDIO_CHANNEL_IN_PRESSURE = 1024u, // 0x400
-    AUDIO_CHANNEL_IN_X_AXIS = 2048u, // 0x800
-    AUDIO_CHANNEL_IN_Y_AXIS = 4096u, // 0x1000
-    AUDIO_CHANNEL_IN_Z_AXIS = 8192u, // 0x2000
-    AUDIO_CHANNEL_IN_VOICE_UPLINK = 16384u, // 0x4000
-    AUDIO_CHANNEL_IN_VOICE_DNLINK = 32768u, // 0x8000
-    AUDIO_CHANNEL_IN_MONO = 16u, // IN_FRONT
-    AUDIO_CHANNEL_IN_STEREO = 12u, // (IN_LEFT | IN_RIGHT)
-    AUDIO_CHANNEL_IN_FRONT_BACK = 48u, // (IN_FRONT | IN_BACK)
-    AUDIO_CHANNEL_IN_6 = 252u, // (((((IN_LEFT | IN_RIGHT) | IN_FRONT) | IN_BACK) | IN_LEFT_PROCESSED) | IN_RIGHT_PROCESSED)
-    AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO = 16400u, // (IN_VOICE_UPLINK | IN_MONO)
-    AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO = 32784u, // (IN_VOICE_DNLINK | IN_MONO)
-    AUDIO_CHANNEL_IN_VOICE_CALL_MONO = 49168u, // (IN_VOICE_UPLINK_MONO | IN_VOICE_DNLINK_MONO)
-    AUDIO_CHANNEL_IN_ALL = 65532u, // (((((((((((((IN_LEFT | IN_RIGHT) | IN_FRONT) | IN_BACK) | IN_LEFT_PROCESSED) | IN_RIGHT_PROCESSED) | IN_FRONT_PROCESSED) | IN_BACK_PROCESSED) | IN_PRESSURE) | IN_X_AXIS) | IN_Y_AXIS) | IN_Z_AXIS) | IN_VOICE_UPLINK) | IN_VOICE_DNLINK)
-    AUDIO_CHANNEL_COUNT_MAX = 30u, // 30
-    AUDIO_CHANNEL_INDEX_HDR = 2147483648u, // (REPRESENTATION_INDEX << COUNT_MAX)
-    AUDIO_CHANNEL_INDEX_MASK_1 = 2147483649u, // (INDEX_HDR | ((1 << 1) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_2 = 2147483651u, // (INDEX_HDR | ((1 << 2) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_3 = 2147483655u, // (INDEX_HDR | ((1 << 3) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_4 = 2147483663u, // (INDEX_HDR | ((1 << 4) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_5 = 2147483679u, // (INDEX_HDR | ((1 << 5) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_6 = 2147483711u, // (INDEX_HDR | ((1 << 6) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_7 = 2147483775u, // (INDEX_HDR | ((1 << 7) - 1))
-    AUDIO_CHANNEL_INDEX_MASK_8 = 2147483903u, // (INDEX_HDR | ((1 << 8) - 1))
-};
+    AUDIO_CHANNEL_REPRESENTATION_POSITION   = 0x0u,
+    AUDIO_CHANNEL_REPRESENTATION_INDEX      = 0x2u,
+    AUDIO_CHANNEL_NONE                      = 0x0u,
+    AUDIO_CHANNEL_INVALID                   = 0xC0000000u,
 
-enum {
-    AUDIO_INTERLEAVE_LEFT = 0,
-    AUDIO_INTERLEAVE_RIGHT = 1,
+    AUDIO_CHANNEL_OUT_FRONT_LEFT            = 0x1u,
+    AUDIO_CHANNEL_OUT_FRONT_RIGHT           = 0x2u,
+    AUDIO_CHANNEL_OUT_FRONT_CENTER          = 0x4u,
+    AUDIO_CHANNEL_OUT_LOW_FREQUENCY         = 0x8u,
+    AUDIO_CHANNEL_OUT_BACK_LEFT             = 0x10u,
+    AUDIO_CHANNEL_OUT_BACK_RIGHT            = 0x20u,
+    AUDIO_CHANNEL_OUT_FRONT_LEFT_OF_CENTER  = 0x40u,
+    AUDIO_CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x80u,
+    AUDIO_CHANNEL_OUT_BACK_CENTER           = 0x100u,
+    AUDIO_CHANNEL_OUT_SIDE_LEFT             = 0x200u,
+    AUDIO_CHANNEL_OUT_SIDE_RIGHT            = 0x400u,
+    AUDIO_CHANNEL_OUT_TOP_CENTER            = 0x800u,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_LEFT        = 0x1000u,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_CENTER      = 0x2000u,
+    AUDIO_CHANNEL_OUT_TOP_FRONT_RIGHT       = 0x4000u,
+    AUDIO_CHANNEL_OUT_TOP_BACK_LEFT         = 0x8000u,
+    AUDIO_CHANNEL_OUT_TOP_BACK_CENTER       = 0x10000u,
+    AUDIO_CHANNEL_OUT_TOP_BACK_RIGHT        = 0x20000u,
+    AUDIO_CHANNEL_OUT_TOP_SIDE_LEFT         = 0x40000u,
+    AUDIO_CHANNEL_OUT_TOP_SIDE_RIGHT        = 0x80000u,
+    AUDIO_CHANNEL_OUT_MONO                  = 0x1u,     // OUT_FRONT_LEFT
+    AUDIO_CHANNEL_OUT_STEREO                = 0x3u,     // OUT_FRONT_LEFT | OUT_FRONT_RIGHT
+    AUDIO_CHANNEL_OUT_2POINT1               = 0xBu,     // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_LOW_FREQUENCY
+    AUDIO_CHANNEL_OUT_2POINT0POINT2         = 0xC0003u, // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_2POINT1POINT2         = 0xC000Bu, // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT | OUT_LOW_FREQUENCY
+    AUDIO_CHANNEL_OUT_3POINT0POINT2         = 0xC0007u, // OUT_FRONT_LEFT | OUT_FRONT_CENTER | OUT_FRONT_RIGHT | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_3POINT1POINT2         = 0xC000Fu, // OUT_FRONT_LEFT | OUT_FRONT_CENTER | OUT_FRONT_RIGHT | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT | OUT_LOW_FREQUENCY
+    AUDIO_CHANNEL_OUT_QUAD                  = 0x33u,    // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_BACK_LEFT | OUT_BACK_RIGHT
+    AUDIO_CHANNEL_OUT_QUAD_BACK             = 0x33u,    // OUT_QUAD
+    AUDIO_CHANNEL_OUT_QUAD_SIDE             = 0x603u,   // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_SIDE_LEFT | OUT_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_SURROUND              = 0x107u,   // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_FRONT_CENTER | OUT_BACK_CENTER
+    AUDIO_CHANNEL_OUT_PENTA                 = 0x37u,    // OUT_QUAD | OUT_FRONT_CENTER
+    AUDIO_CHANNEL_OUT_5POINT1               = 0x3Fu,    // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | OUT_BACK_LEFT | OUT_BACK_RIGHT
+    AUDIO_CHANNEL_OUT_5POINT1_BACK          = 0x3Fu,    // OUT_5POINT1
+    AUDIO_CHANNEL_OUT_5POINT1_SIDE          = 0x60Fu,   // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | OUT_SIDE_LEFT | OUT_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_5POINT1POINT2         = 0xC003Fu, // OUT_5POINT1 | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_5POINT1POINT4         = 0x2D03Fu, // OUT_5POINT1 | OUT_TOP_FRONT_LEFT | OUT_TOP_FRONT_RIGHT | OUT_TOP_BACK_LEFT | OUT_TOP_BACK_RIGHT
+    AUDIO_CHANNEL_OUT_6POINT1               = 0x13Fu,   // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | OUT_BACK_LEFT | OUT_BACK_RIGHT | OUT_BACK_CENTER
+    AUDIO_CHANNEL_OUT_7POINT1               = 0x63Fu,   // OUT_FRONT_LEFT | OUT_FRONT_RIGHT | OUT_FRONT_CENTER | OUT_LOW_FREQUENCY | OUT_BACK_LEFT | OUT_BACK_RIGHT | OUT_SIDE_LEFT | OUT_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_7POINT1POINT2         = 0xC063Fu, // OUT_7POINT1 | OUT_TOP_SIDE_LEFT | OUT_TOP_SIDE_RIGHT
+    AUDIO_CHANNEL_OUT_7POINT1POINT4         = 0x2D63Fu, // OUT_7POINT1 | OUT_TOP_FRONT_LEFT | OUT_TOP_FRONT_RIGHT | OUT_TOP_BACK_LEFT | OUT_TOP_BACK_RIGHT
+
+    AUDIO_CHANNEL_IN_LEFT                   = 0x4u,
+    AUDIO_CHANNEL_IN_RIGHT                  = 0x8u,
+    AUDIO_CHANNEL_IN_FRONT                  = 0x10u,
+    AUDIO_CHANNEL_IN_BACK                   = 0x20u,
+    AUDIO_CHANNEL_IN_LEFT_PROCESSED         = 0x40u,
+    AUDIO_CHANNEL_IN_RIGHT_PROCESSED        = 0x80u,
+    AUDIO_CHANNEL_IN_FRONT_PROCESSED        = 0x100u,
+    AUDIO_CHANNEL_IN_BACK_PROCESSED         = 0x200u,
+    AUDIO_CHANNEL_IN_PRESSURE               = 0x400u,
+    AUDIO_CHANNEL_IN_X_AXIS                 = 0x800u,
+    AUDIO_CHANNEL_IN_Y_AXIS                 = 0x1000u,
+    AUDIO_CHANNEL_IN_Z_AXIS                 = 0x2000u,
+    AUDIO_CHANNEL_IN_BACK_LEFT              = 0x10000u,
+    AUDIO_CHANNEL_IN_BACK_RIGHT             = 0x20000u,
+    AUDIO_CHANNEL_IN_CENTER                 = 0x40000u,
+    AUDIO_CHANNEL_IN_LOW_FREQUENCY          = 0x100000u,
+    AUDIO_CHANNEL_IN_TOP_LEFT               = 0x200000u,
+    AUDIO_CHANNEL_IN_TOP_RIGHT              = 0x400000u,
+    AUDIO_CHANNEL_IN_VOICE_UPLINK           = 0x4000u,
+    AUDIO_CHANNEL_IN_VOICE_DNLINK           = 0x8000u,
+    AUDIO_CHANNEL_IN_MONO                   = 0x10u,     // IN_FRONT
+    AUDIO_CHANNEL_IN_STEREO                 = 0xCu,      // IN_LEFT | IN_RIGHT
+    AUDIO_CHANNEL_IN_FRONT_BACK             = 0x30u,     // IN_FRONT | IN_BACK
+    AUDIO_CHANNEL_IN_6                      = 0xFCu,     // IN_LEFT | IN_RIGHT | IN_FRONT | IN_BACK | IN_LEFT_PROCESSED | IN_RIGHT_PROCESSED
+    AUDIO_CHANNEL_IN_2POINT0POINT2          = 0x60000Cu, // IN_LEFT | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT
+    AUDIO_CHANNEL_IN_2POINT1POINT2          = 0x70000Cu, // IN_LEFT | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT | IN_LOW_FREQUENCY
+    AUDIO_CHANNEL_IN_3POINT0POINT2          = 0x64000Cu, // IN_LEFT | IN_CENTER | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT
+    AUDIO_CHANNEL_IN_3POINT1POINT2          = 0x74000Cu, // IN_LEFT | IN_CENTER | IN_RIGHT | IN_TOP_LEFT | IN_TOP_RIGHT | IN_LOW_FREQUENCY
+    AUDIO_CHANNEL_IN_5POINT1                = 0x17000Cu, // IN_LEFT | IN_CENTER | IN_RIGHT | IN_BACK_LEFT | IN_BACK_RIGHT | IN_LOW_FREQUENCY
+    AUDIO_CHANNEL_IN_VOICE_UPLINK_MONO      = 0x4010u,   // IN_VOICE_UPLINK | IN_MONO
+    AUDIO_CHANNEL_IN_VOICE_DNLINK_MONO      = 0x8010u,   // IN_VOICE_DNLINK | IN_MONO
+    AUDIO_CHANNEL_IN_VOICE_CALL_MONO        = 0xC010u,   // IN_VOICE_UPLINK_MONO | IN_VOICE_DNLINK_MONO
+
+    AUDIO_CHANNEL_COUNT_MAX                 = 30u,
+    AUDIO_CHANNEL_INDEX_HDR                 = 0x80000000u, // REPRESENTATION_INDEX << COUNT_MAX
+    AUDIO_CHANNEL_INDEX_MASK_1              = 0x80000001u, // INDEX_HDR | (1 << 1) - 1
+    AUDIO_CHANNEL_INDEX_MASK_2              = 0x80000003u, // INDEX_HDR | (1 << 2) - 1
+    AUDIO_CHANNEL_INDEX_MASK_3              = 0x80000007u, // INDEX_HDR | (1 << 3) - 1
+    AUDIO_CHANNEL_INDEX_MASK_4              = 0x8000000Fu, // INDEX_HDR | (1 << 4) - 1
+    AUDIO_CHANNEL_INDEX_MASK_5              = 0x8000001Fu, // INDEX_HDR | (1 << 5) - 1
+    AUDIO_CHANNEL_INDEX_MASK_6              = 0x8000003Fu, // INDEX_HDR | (1 << 6) - 1
+    AUDIO_CHANNEL_INDEX_MASK_7              = 0x8000007Fu, // INDEX_HDR | (1 << 7) - 1
+    AUDIO_CHANNEL_INDEX_MASK_8              = 0x800000FFu, // INDEX_HDR | (1 << 8) - 1
 };
 
 typedef enum {
+#ifndef AUDIO_NO_SYSTEM_DECLARATIONS
     AUDIO_MODE_INVALID = -2, // (-2)
     AUDIO_MODE_CURRENT = -1, // (-1)
+#endif // AUDIO_NO_SYSTEM_DECLARATIONS
     AUDIO_MODE_NORMAL = 0,
     AUDIO_MODE_RINGTONE = 1,
     AUDIO_MODE_IN_CALL = 2,
     AUDIO_MODE_IN_COMMUNICATION = 3,
-    AUDIO_MODE_CNT = 4,
-    AUDIO_MODE_MAX = 3, // (CNT - 1)
 } audio_mode_t;
 
 enum {
-    AUDIO_DEVICE_NONE = 0u, // 0x0
-    AUDIO_DEVICE_BIT_IN = 2147483648u, // 0x80000000
-    AUDIO_DEVICE_BIT_DEFAULT = 1073741824u, // 0x40000000
-    AUDIO_DEVICE_OUT_EARPIECE = 1u, // 0x1
-    AUDIO_DEVICE_OUT_SPEAKER = 2u, // 0x2
-    AUDIO_DEVICE_OUT_WIRED_HEADSET = 4u, // 0x4
-    AUDIO_DEVICE_OUT_WIRED_HEADPHONE = 8u, // 0x8
-    AUDIO_DEVICE_OUT_BLUETOOTH_SCO = 16u, // 0x10
-    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET = 32u, // 0x20
-    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT = 64u, // 0x40
-    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP = 128u, // 0x80
-    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 256u, // 0x100
-    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 512u, // 0x200
-    AUDIO_DEVICE_OUT_AUX_DIGITAL = 1024u, // 0x400
-    AUDIO_DEVICE_OUT_HDMI = 1024u, // OUT_AUX_DIGITAL
-    AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET = 2048u, // 0x800
-    AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET = 4096u, // 0x1000
-    AUDIO_DEVICE_OUT_USB_ACCESSORY = 8192u, // 0x2000
-    AUDIO_DEVICE_OUT_USB_DEVICE = 16384u, // 0x4000
-    AUDIO_DEVICE_OUT_REMOTE_SUBMIX = 32768u, // 0x8000
-    AUDIO_DEVICE_OUT_TELEPHONY_TX = 65536u, // 0x10000
-    AUDIO_DEVICE_OUT_LINE = 131072u, // 0x20000
-    AUDIO_DEVICE_OUT_HDMI_ARC = 262144u, // 0x40000
-    AUDIO_DEVICE_OUT_SPDIF = 524288u, // 0x80000
-    AUDIO_DEVICE_OUT_FM = 1048576u, // 0x100000
-    AUDIO_DEVICE_OUT_AUX_LINE = 2097152u, // 0x200000
-    AUDIO_DEVICE_OUT_SPEAKER_SAFE = 4194304u, // 0x400000
-    AUDIO_DEVICE_OUT_IP = 8388608u, // 0x800000
-    AUDIO_DEVICE_OUT_BUS = 16777216u, // 0x1000000
-    AUDIO_DEVICE_OUT_PROXY = 33554432u, // 0x2000000
-    AUDIO_DEVICE_OUT_USB_HEADSET = 67108864u, // 0x4000000
-    AUDIO_DEVICE_OUT_DEFAULT = 1073741824u, // BIT_DEFAULT
-    AUDIO_DEVICE_OUT_ALL = 1207959551u, // (((((((((((((((((((((((((((OUT_EARPIECE | OUT_SPEAKER) | OUT_WIRED_HEADSET) | OUT_WIRED_HEADPHONE) | OUT_BLUETOOTH_SCO) | OUT_BLUETOOTH_SCO_HEADSET) | OUT_BLUETOOTH_SCO_CARKIT) | OUT_BLUETOOTH_A2DP) | OUT_BLUETOOTH_A2DP_HEADPHONES) | OUT_BLUETOOTH_A2DP_SPEAKER) | OUT_HDMI) | OUT_ANLG_DOCK_HEADSET) | OUT_DGTL_DOCK_HEADSET) | OUT_USB_ACCESSORY) | OUT_USB_DEVICE) | OUT_REMOTE_SUBMIX) | OUT_TELEPHONY_TX) | OUT_LINE) | OUT_HDMI_ARC) | OUT_SPDIF) | OUT_FM) | OUT_AUX_LINE) | OUT_SPEAKER_SAFE) | OUT_IP) | OUT_BUS) | OUT_PROXY) | OUT_USB_HEADSET) | OUT_DEFAULT)
-    AUDIO_DEVICE_OUT_ALL_A2DP = 896u, // ((OUT_BLUETOOTH_A2DP | OUT_BLUETOOTH_A2DP_HEADPHONES) | OUT_BLUETOOTH_A2DP_SPEAKER)
-    AUDIO_DEVICE_OUT_ALL_SCO = 112u, // ((OUT_BLUETOOTH_SCO | OUT_BLUETOOTH_SCO_HEADSET) | OUT_BLUETOOTH_SCO_CARKIT)
-    AUDIO_DEVICE_OUT_ALL_USB = 67133440u, // ((OUT_USB_ACCESSORY | OUT_USB_DEVICE) | OUT_USB_HEADSET)
-    AUDIO_DEVICE_IN_COMMUNICATION = 2147483649u, // (BIT_IN | 0x1)
-    AUDIO_DEVICE_IN_AMBIENT = 2147483650u, // (BIT_IN | 0x2)
-    AUDIO_DEVICE_IN_BUILTIN_MIC = 2147483652u, // (BIT_IN | 0x4)
-    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET = 2147483656u, // (BIT_IN | 0x8)
-    AUDIO_DEVICE_IN_WIRED_HEADSET = 2147483664u, // (BIT_IN | 0x10)
-    AUDIO_DEVICE_IN_AUX_DIGITAL = 2147483680u, // (BIT_IN | 0x20)
-    AUDIO_DEVICE_IN_HDMI = 2147483680u, // IN_AUX_DIGITAL
-    AUDIO_DEVICE_IN_VOICE_CALL = 2147483712u, // (BIT_IN | 0x40)
-    AUDIO_DEVICE_IN_TELEPHONY_RX = 2147483712u, // IN_VOICE_CALL
-    AUDIO_DEVICE_IN_BACK_MIC = 2147483776u, // (BIT_IN | 0x80)
-    AUDIO_DEVICE_IN_REMOTE_SUBMIX = 2147483904u, // (BIT_IN | 0x100)
-    AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET = 2147484160u, // (BIT_IN | 0x200)
-    AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET = 2147484672u, // (BIT_IN | 0x400)
-    AUDIO_DEVICE_IN_USB_ACCESSORY = 2147485696u, // (BIT_IN | 0x800)
-    AUDIO_DEVICE_IN_USB_DEVICE = 2147487744u, // (BIT_IN | 0x1000)
-    AUDIO_DEVICE_IN_FM_TUNER = 2147491840u, // (BIT_IN | 0x2000)
-    AUDIO_DEVICE_IN_TV_TUNER = 2147500032u, // (BIT_IN | 0x4000)
-    AUDIO_DEVICE_IN_LINE = 2147516416u, // (BIT_IN | 0x8000)
-    AUDIO_DEVICE_IN_SPDIF = 2147549184u, // (BIT_IN | 0x10000)
-    AUDIO_DEVICE_IN_BLUETOOTH_A2DP = 2147614720u, // (BIT_IN | 0x20000)
-    AUDIO_DEVICE_IN_LOOPBACK = 2147745792u, // (BIT_IN | 0x40000)
-    AUDIO_DEVICE_IN_IP = 2148007936u, // (BIT_IN | 0x80000)
-    AUDIO_DEVICE_IN_BUS = 2148532224u, // (BIT_IN | 0x100000)
-    AUDIO_DEVICE_IN_PROXY = 2164260864u, // (BIT_IN | 0x1000000)
-    AUDIO_DEVICE_IN_USB_HEADSET = 2181038080u, // (BIT_IN | 0x2000000)
-    AUDIO_DEVICE_IN_DEFAULT = 3221225472u, // (BIT_IN | BIT_DEFAULT)
-    AUDIO_DEVICE_IN_ALL = 3273654271u, // (((((((((((((((((((((((IN_COMMUNICATION | IN_AMBIENT) | IN_BUILTIN_MIC) | IN_BLUETOOTH_SCO_HEADSET) | IN_WIRED_HEADSET) | IN_HDMI) | IN_TELEPHONY_RX) | IN_BACK_MIC) | IN_REMOTE_SUBMIX) | IN_ANLG_DOCK_HEADSET) | IN_DGTL_DOCK_HEADSET) | IN_USB_ACCESSORY) | IN_USB_DEVICE) | IN_FM_TUNER) | IN_TV_TUNER) | IN_LINE) | IN_SPDIF) | IN_BLUETOOTH_A2DP) | IN_LOOPBACK) | IN_IP) | IN_BUS) | IN_PROXY) | IN_USB_HEADSET) | IN_DEFAULT)
-    AUDIO_DEVICE_IN_ALL_SCO = 2147483656u, // IN_BLUETOOTH_SCO_HEADSET
-    AUDIO_DEVICE_IN_ALL_USB = 2181044224u, // ((IN_USB_ACCESSORY | IN_USB_DEVICE) | IN_USB_HEADSET)
+    AUDIO_DEVICE_NONE                          = 0x0u,
+    AUDIO_DEVICE_BIT_IN                        = 0x80000000u,
+    AUDIO_DEVICE_BIT_DEFAULT                   = 0x40000000u,
+
+    AUDIO_DEVICE_OUT_EARPIECE                  = 0x1u,
+    AUDIO_DEVICE_OUT_SPEAKER                   = 0x2u,
+    AUDIO_DEVICE_OUT_WIRED_HEADSET             = 0x4u,
+    AUDIO_DEVICE_OUT_WIRED_HEADPHONE           = 0x8u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO             = 0x10u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET     = 0x20u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT      = 0x40u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP            = 0x80u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100u,
+    AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER    = 0x200u,
+    AUDIO_DEVICE_OUT_AUX_DIGITAL               = 0x400u,
+    AUDIO_DEVICE_OUT_HDMI                      = 0x400u,      // OUT_AUX_DIGITAL
+    AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET         = 0x800u,
+    AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET         = 0x1000u,
+    AUDIO_DEVICE_OUT_USB_ACCESSORY             = 0x2000u,
+    AUDIO_DEVICE_OUT_USB_DEVICE                = 0x4000u,
+    AUDIO_DEVICE_OUT_REMOTE_SUBMIX             = 0x8000u,
+    AUDIO_DEVICE_OUT_TELEPHONY_TX              = 0x10000u,
+    AUDIO_DEVICE_OUT_LINE                      = 0x20000u,
+    AUDIO_DEVICE_OUT_HDMI_ARC                  = 0x40000u,
+    AUDIO_DEVICE_OUT_SPDIF                     = 0x80000u,
+    AUDIO_DEVICE_OUT_FM                        = 0x100000u,
+    AUDIO_DEVICE_OUT_AUX_LINE                  = 0x200000u,
+    AUDIO_DEVICE_OUT_SPEAKER_SAFE              = 0x400000u,
+    AUDIO_DEVICE_OUT_IP                        = 0x800000u,
+    AUDIO_DEVICE_OUT_BUS                       = 0x1000000u,
+    AUDIO_DEVICE_OUT_PROXY                     = 0x2000000u,
+    AUDIO_DEVICE_OUT_USB_HEADSET               = 0x4000000u,
+    AUDIO_DEVICE_OUT_HEARING_AID               = 0x8000000u,
+    AUDIO_DEVICE_OUT_ECHO_CANCELLER            = 0x10000000u,
+    AUDIO_DEVICE_OUT_DEFAULT                   = 0x40000000u, // BIT_DEFAULT
+
+    AUDIO_DEVICE_IN_COMMUNICATION              = 0x80000001u, // BIT_IN | 0x1
+    AUDIO_DEVICE_IN_AMBIENT                    = 0x80000002u, // BIT_IN | 0x2
+    AUDIO_DEVICE_IN_BUILTIN_MIC                = 0x80000004u, // BIT_IN | 0x4
+    AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET      = 0x80000008u, // BIT_IN | 0x8
+    AUDIO_DEVICE_IN_WIRED_HEADSET              = 0x80000010u, // BIT_IN | 0x10
+    AUDIO_DEVICE_IN_AUX_DIGITAL                = 0x80000020u, // BIT_IN | 0x20
+    AUDIO_DEVICE_IN_HDMI                       = 0x80000020u, // IN_AUX_DIGITAL
+    AUDIO_DEVICE_IN_VOICE_CALL                 = 0x80000040u, // BIT_IN | 0x40
+    AUDIO_DEVICE_IN_TELEPHONY_RX               = 0x80000040u, // IN_VOICE_CALL
+    AUDIO_DEVICE_IN_BACK_MIC                   = 0x80000080u, // BIT_IN | 0x80
+    AUDIO_DEVICE_IN_REMOTE_SUBMIX              = 0x80000100u, // BIT_IN | 0x100
+    AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET          = 0x80000200u, // BIT_IN | 0x200
+    AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET          = 0x80000400u, // BIT_IN | 0x400
+    AUDIO_DEVICE_IN_USB_ACCESSORY              = 0x80000800u, // BIT_IN | 0x800
+    AUDIO_DEVICE_IN_USB_DEVICE                 = 0x80001000u, // BIT_IN | 0x1000
+    AUDIO_DEVICE_IN_FM_TUNER                   = 0x80002000u, // BIT_IN | 0x2000
+    AUDIO_DEVICE_IN_TV_TUNER                   = 0x80004000u, // BIT_IN | 0x4000
+    AUDIO_DEVICE_IN_LINE                       = 0x80008000u, // BIT_IN | 0x8000
+    AUDIO_DEVICE_IN_SPDIF                      = 0x80010000u, // BIT_IN | 0x10000
+    AUDIO_DEVICE_IN_BLUETOOTH_A2DP             = 0x80020000u, // BIT_IN | 0x20000
+    AUDIO_DEVICE_IN_LOOPBACK                   = 0x80040000u, // BIT_IN | 0x40000
+    AUDIO_DEVICE_IN_IP                         = 0x80080000u, // BIT_IN | 0x80000
+    AUDIO_DEVICE_IN_BUS                        = 0x80100000u, // BIT_IN | 0x100000
+    AUDIO_DEVICE_IN_PROXY                      = 0x81000000u, // BIT_IN | 0x1000000
+    AUDIO_DEVICE_IN_USB_HEADSET                = 0x82000000u, // BIT_IN | 0x2000000
+    AUDIO_DEVICE_IN_BLUETOOTH_BLE              = 0x84000000u, // BIT_IN | 0x4000000
+    AUDIO_DEVICE_IN_DEFAULT                    = 0xC0000000u, // BIT_IN | BIT_DEFAULT
 };
 
 typedef enum {
-    AUDIO_OUTPUT_FLAG_NONE = 0, // 0x0
-    AUDIO_OUTPUT_FLAG_DIRECT = 1, // 0x1
-    AUDIO_OUTPUT_FLAG_PRIMARY = 2, // 0x2
-    AUDIO_OUTPUT_FLAG_FAST = 4, // 0x4
-    AUDIO_OUTPUT_FLAG_DEEP_BUFFER = 8, // 0x8
-    AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD = 16, // 0x10
-    AUDIO_OUTPUT_FLAG_NON_BLOCKING = 32, // 0x20
-    AUDIO_OUTPUT_FLAG_HW_AV_SYNC = 64, // 0x40
-    AUDIO_OUTPUT_FLAG_TTS = 128, // 0x80
-    AUDIO_OUTPUT_FLAG_RAW = 256, // 0x100
-    AUDIO_OUTPUT_FLAG_SYNC = 512, // 0x200
-    AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO = 1024, // 0x400
-    AUDIO_OUTPUT_FLAG_DIRECT_PCM = 8192, // 0x2000
-    AUDIO_OUTPUT_FLAG_MMAP_NOIRQ = 16384, // 0x4000
-    AUDIO_OUTPUT_FLAG_VOIP_RX = 32768, // 0x8000
+    AUDIO_OUTPUT_FLAG_NONE             = 0x0,
+    AUDIO_OUTPUT_FLAG_DIRECT           = 0x1,
+    AUDIO_OUTPUT_FLAG_PRIMARY          = 0x2,
+    AUDIO_OUTPUT_FLAG_FAST             = 0x4,
+    AUDIO_OUTPUT_FLAG_DEEP_BUFFER      = 0x8,
+    AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD = 0x10,
+    AUDIO_OUTPUT_FLAG_NON_BLOCKING     = 0x20,
+    AUDIO_OUTPUT_FLAG_HW_AV_SYNC       = 0x40,
+    AUDIO_OUTPUT_FLAG_TTS              = 0x80,
+    AUDIO_OUTPUT_FLAG_RAW              = 0x100,
+    AUDIO_OUTPUT_FLAG_SYNC             = 0x200,
+    AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO  = 0x400,
+    AUDIO_OUTPUT_FLAG_DIRECT_PCM       = 0x2000,
+    AUDIO_OUTPUT_FLAG_MMAP_NOIRQ       = 0x4000,
+    AUDIO_OUTPUT_FLAG_VOIP_RX          = 0x8000,
+    AUDIO_OUTPUT_FLAG_INCALL_MUSIC     = 0x10000,
 } audio_output_flags_t;
 
 typedef enum {
-    AUDIO_INPUT_FLAG_NONE = 0, // 0x0
-    AUDIO_INPUT_FLAG_FAST = 1, // 0x1
-    AUDIO_INPUT_FLAG_HW_HOTWORD = 2, // 0x2
-    AUDIO_INPUT_FLAG_RAW = 4, // 0x4
-    AUDIO_INPUT_FLAG_SYNC = 8, // 0x8
-    AUDIO_INPUT_FLAG_MMAP_NOIRQ = 16, // 0x10
-    AUDIO_INPUT_FLAG_VOIP_TX = 32, // 0x20
+    AUDIO_INPUT_FLAG_NONE       = 0x0,
+    AUDIO_INPUT_FLAG_FAST       = 0x1,
+    AUDIO_INPUT_FLAG_HW_HOTWORD = 0x2,
+    AUDIO_INPUT_FLAG_RAW        = 0x4,
+    AUDIO_INPUT_FLAG_SYNC       = 0x8,
+    AUDIO_INPUT_FLAG_MMAP_NOIRQ = 0x10,
+    AUDIO_INPUT_FLAG_VOIP_TX    = 0x20,
+    AUDIO_INPUT_FLAG_HW_AV_SYNC = 0x40,
 } audio_input_flags_t;
 
 typedef enum {
@@ -344,54 +390,61 @@ typedef enum {
     AUDIO_USAGE_ALARM = 4,
     AUDIO_USAGE_NOTIFICATION = 5,
     AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE = 6,
+#ifndef AUDIO_NO_SYSTEM_DECLARATIONS
     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST = 7,
     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT = 8,
     AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED = 9,
     AUDIO_USAGE_NOTIFICATION_EVENT = 10,
+#endif // AUDIO_NO_SYSTEM_DECLARATIONS
     AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY = 11,
     AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE = 12,
     AUDIO_USAGE_ASSISTANCE_SONIFICATION = 13,
     AUDIO_USAGE_GAME = 14,
     AUDIO_USAGE_VIRTUAL_SOURCE = 15,
     AUDIO_USAGE_ASSISTANT = 16,
-    AUDIO_USAGE_CNT = 17,
-    AUDIO_USAGE_MAX = 16, // (CNT - 1)
 } audio_usage_t;
 
+typedef enum {
+    AUDIO_CONTENT_TYPE_UNKNOWN = 0u,
+    AUDIO_CONTENT_TYPE_SPEECH = 1u,
+    AUDIO_CONTENT_TYPE_MUSIC = 2u,
+    AUDIO_CONTENT_TYPE_MOVIE = 3u,
+    AUDIO_CONTENT_TYPE_SONIFICATION = 4u,
+} audio_content_type_t;
+
 enum {
-    AUDIO_GAIN_MODE_JOINT = 1u, // 0x1
-    AUDIO_GAIN_MODE_CHANNELS = 2u, // 0x2
-    AUDIO_GAIN_MODE_RAMP = 4u, // 0x4
+    AUDIO_GAIN_MODE_JOINT    = 0x1u,
+    AUDIO_GAIN_MODE_CHANNELS = 0x2u,
+    AUDIO_GAIN_MODE_RAMP     = 0x4u,
 };
 
 typedef enum {
     AUDIO_PORT_ROLE_NONE = 0,
-    AUDIO_PORT_ROLE_SOURCE = 1,
-    AUDIO_PORT_ROLE_SINK = 2,
+    AUDIO_PORT_ROLE_SOURCE = 1, // (::android::hardware::audio::common::V4_0::AudioPortRole.NONE implicitly + 1)
+    AUDIO_PORT_ROLE_SINK = 2, // (::android::hardware::audio::common::V4_0::AudioPortRole.SOURCE implicitly + 1)
 } audio_port_role_t;
 
 typedef enum {
     AUDIO_PORT_TYPE_NONE = 0,
-    AUDIO_PORT_TYPE_DEVICE = 1,
-    AUDIO_PORT_TYPE_MIX = 2,
-    AUDIO_PORT_TYPE_SESSION = 3,
+    AUDIO_PORT_TYPE_DEVICE = 1, // (::android::hardware::audio::common::V4_0::AudioPortType.NONE implicitly + 1)
+    AUDIO_PORT_TYPE_MIX = 2, // (::android::hardware::audio::common::V4_0::AudioPortType.DEVICE implicitly + 1)
+    AUDIO_PORT_TYPE_SESSION = 3, // (::android::hardware::audio::common::V4_0::AudioPortType.MIX implicitly + 1)
 } audio_port_type_t;
 
 enum {
-    AUDIO_PORT_CONFIG_SAMPLE_RATE = 1u, // 0x1
-    AUDIO_PORT_CONFIG_CHANNEL_MASK = 2u, // 0x2
-    AUDIO_PORT_CONFIG_FORMAT = 4u, // 0x4
-    AUDIO_PORT_CONFIG_GAIN = 8u, // 0x8
-    AUDIO_PORT_CONFIG_ALL = 15u, // (((SAMPLE_RATE | CHANNEL_MASK) | FORMAT) | GAIN)
+    AUDIO_PORT_CONFIG_SAMPLE_RATE  = 0x1u,
+    AUDIO_PORT_CONFIG_CHANNEL_MASK = 0x2u,
+    AUDIO_PORT_CONFIG_FORMAT       = 0x4u,
+    AUDIO_PORT_CONFIG_GAIN         = 0x8u,
 };
 
 typedef enum {
     AUDIO_LATENCY_LOW = 0,
-    AUDIO_LATENCY_NORMAL = 1,
+    AUDIO_LATENCY_NORMAL = 1, // (::android::hardware::audio::common::V4_0::AudioMixLatencyClass.LOW implicitly + 1)
 } audio_mix_latency_class_t;
 
 #ifdef __cplusplus
 }
 #endif
 
-#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V2_0_EXPORTED_CONSTANTS_H_
+#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_COMMON_V4_0_EXPORTED_CONSTANTS_H_
index e683d60..8e15410 100644 (file)
@@ -27,6 +27,7 @@
 #include <cutils/bitops.h>
 
 #include "audio-base.h"
+#include "audio-base-utils.h"
 
 __BEGIN_DECLS
 
@@ -43,20 +44,6 @@ __BEGIN_DECLS
 /* AudioFlinger and AudioPolicy services use I/O handles to identify audio sources and sinks */
 typedef int audio_io_handle_t;
 
-/* Do not change these values without updating their counterparts
- * in frameworks/base/media/java/android/media/AudioAttributes.java
- */
-typedef enum {
-    AUDIO_CONTENT_TYPE_UNKNOWN      = 0,
-    AUDIO_CONTENT_TYPE_SPEECH       = 1,
-    AUDIO_CONTENT_TYPE_MUSIC        = 2,
-    AUDIO_CONTENT_TYPE_MOVIE        = 3,
-    AUDIO_CONTENT_TYPE_SONIFICATION = 4,
-
-    AUDIO_CONTENT_TYPE_CNT,
-    AUDIO_CONTENT_TYPE_MAX          = AUDIO_CONTENT_TYPE_CNT - 1,
-} audio_content_type_t;
-
 typedef uint32_t audio_flags_mask_t;
 
 /* Do not change these values without updating their counterparts
@@ -84,7 +71,7 @@ typedef struct {
     audio_source_t       source;
     audio_flags_mask_t   flags;
     char                 tags[AUDIO_ATTRIBUTES_TAGS_MAX_SIZE]; /* UTF8 */
-} audio_attributes_t;
+} __attribute__((packed)) audio_attributes_t; // sent through Binder;
 
 /* a unique ID allocated by AudioFlinger for use as an audio_io_handle_t, audio_session_t,
  * effect ID (int), audio_module_handle_t, and audio_patch_handle_t.
@@ -198,6 +185,21 @@ static inline audio_channel_mask_t audio_channel_mask_from_representation_and_bi
     return (audio_channel_mask_t) ((representation << AUDIO_CHANNEL_COUNT_MAX) | bits);
 }
 
+/**
+ * Expresses the convention when stereo audio samples are stored interleaved
+ * in an array.  This should improve readability by allowing code to use
+ * symbolic indices instead of hard-coded [0] and [1].
+ *
+ * For multi-channel beyond stereo, the platform convention is that channels
+ * are interleaved in order from least significant channel mask bit to most
+ * significant channel mask bit, with unused bits skipped.  Any exceptions
+ * to this convention will be noted at the appropriate API.
+ */
+enum {
+    AUDIO_INTERLEAVE_LEFT = 0,
+    AUDIO_INTERLEAVE_RIGHT = 1,
+};
+
 /* This enum is deprecated */
 typedef enum {
     AUDIO_IN_ACOUSTICS_NONE          = 0,
@@ -225,6 +227,7 @@ typedef uint32_t audio_devices_t;
  * hardware playback
  * The version and size fields must be initialized by the caller by using
  * one of the constants defined here.
+ * Must be aligned to transmit as raw memory through Binder.
  */
 typedef struct {
     uint16_t version;                   // version of the info structure
@@ -240,7 +243,7 @@ typedef struct {
     uint32_t bit_width;
     uint32_t offload_buffer_size;       // offload fragment size
     audio_usage_t usage;
-} audio_offload_info_t;
+} __attribute__((aligned(8))) audio_offload_info_t;
 
 #define AUDIO_MAKE_OFFLOAD_INFO_VERSION(maj,min) \
             ((((maj) & 0xff) << 8) | ((min) & 0xff))
@@ -267,13 +270,14 @@ static const audio_offload_info_t AUDIO_INFO_INITIALIZER = {
 /* common audio stream configuration parameters
  * You should memset() the entire structure to zero before use to
  * ensure forward compatibility
+ * Must be aligned to transmit as raw memory through Binder.
  */
-struct audio_config {
+struct __attribute__((aligned(8))) audio_config {
     uint32_t sample_rate;
     audio_channel_mask_t channel_mask;
     audio_format_t  format;
     audio_offload_info_t offload_info;
-    size_t frame_count;
+    uint32_t frame_count;
 };
 typedef struct audio_config audio_config_t;
 
@@ -320,6 +324,11 @@ typedef int audio_module_handle_t;
  *  Volume control
  *****************************/
 
+/** 3 dB headroom are allowed on float samples (3db = 10^(3/20) = 1.412538).
+* See: https://developer.android.com/reference/android/media/AudioTrack.html#write(float[], int, int, int)
+*/
+#define FLOAT_NOMINAL_RANGE_HEADROOM 1.412538
+
 /* If the audio hardware supports gain control on some audio paths,
  * the platform can expose them in the audio_policy.conf file. The audio HAL
  * will then implement gain control functions that will use the following data
@@ -524,6 +533,24 @@ struct audio_mmap_position {
                                     is called */
 };
 
+/** Metadata of a record track for an in stream. */
+typedef struct playback_track_metadata {
+    audio_usage_t usage;
+    audio_content_type_t content_type;
+    float gain; // Normalized linear volume. 0=silence, 1=0dbfs...
+} playback_track_metadata_t;
+
+/** Metadata of a playback track for an out stream. */
+typedef struct record_track_metadata {
+    audio_source_t source;
+    float gain; // Normalized linear volume. 0=silence, 1=0dbfs...
+} record_track_metadata_t;
+
+
+/******************************
+ *  Helper functions
+ *****************************/
+
 static inline bool audio_is_output_device(audio_devices_t device)
 {
     if (((device & AUDIO_DEVICE_BIT_IN) == 0) &&
@@ -586,6 +613,11 @@ static inline bool audio_is_bluetooth_sco_device(audio_devices_t device)
     return false;
 }
 
+static inline bool audio_is_hearing_aid_out_device(audio_devices_t device)
+{
+    return device == AUDIO_DEVICE_OUT_HEARING_AID;
+}
+
 static inline bool audio_is_usb_out_device(audio_devices_t device)
 {
     return ((popcount(device) == 1) && (device & AUDIO_DEVICE_OUT_ALL_USB));
@@ -805,6 +837,28 @@ static inline audio_channel_mask_t audio_channel_in_mask_from_count(uint32_t cha
             AUDIO_CHANNEL_REPRESENTATION_POSITION, bits);
 }
 
+static inline audio_channel_mask_t audio_channel_mask_in_to_out(audio_channel_mask_t in)
+{
+    switch (in) {
+    case AUDIO_CHANNEL_IN_MONO:
+        return AUDIO_CHANNEL_OUT_MONO;
+    case AUDIO_CHANNEL_IN_STEREO:
+        return AUDIO_CHANNEL_OUT_STEREO;
+    case AUDIO_CHANNEL_IN_5POINT1:
+        return AUDIO_CHANNEL_OUT_5POINT1;
+    case AUDIO_CHANNEL_IN_3POINT1POINT2:
+        return AUDIO_CHANNEL_OUT_3POINT1POINT2;
+    case AUDIO_CHANNEL_IN_3POINT0POINT2:
+        return AUDIO_CHANNEL_OUT_3POINT0POINT2;
+    case AUDIO_CHANNEL_IN_2POINT1POINT2:
+        return AUDIO_CHANNEL_OUT_2POINT1POINT2;
+    case AUDIO_CHANNEL_IN_2POINT0POINT2:
+        return AUDIO_CHANNEL_OUT_2POINT0POINT2;
+    default:
+        return AUDIO_CHANNEL_INVALID;
+    }
+}
+
 static inline bool audio_is_valid_format(audio_format_t format)
 {
     switch (format & AUDIO_FORMAT_MAIN_MASK) {
@@ -828,6 +882,8 @@ static inline bool audio_is_valid_format(audio_format_t format)
     case AUDIO_FORMAT_AAC_ADTS:
     case AUDIO_FORMAT_HE_AAC_V1:
     case AUDIO_FORMAT_HE_AAC_V2:
+    case AUDIO_FORMAT_AAC_ELD:
+    case AUDIO_FORMAT_AAC_XHE:
     case AUDIO_FORMAT_VORBIS:
     case AUDIO_FORMAT_OPUS:
     case AUDIO_FORMAT_AC3:
@@ -852,6 +908,10 @@ static inline bool audio_is_valid_format(audio_format_t format)
     case AUDIO_FORMAT_DSD:
     case AUDIO_FORMAT_AC4:
     case AUDIO_FORMAT_LDAC:
+    case AUDIO_FORMAT_E_AC3_JOC:
+    case AUDIO_FORMAT_MAT_1_0:
+    case AUDIO_FORMAT_MAT_2_0:
+    case AUDIO_FORMAT_MAT_2_1:
         return true;
     default:
         return false;
@@ -919,6 +979,12 @@ static inline size_t audio_bytes_per_sample(audio_format_t format)
     return size;
 }
 
+static inline size_t audio_bytes_per_frame(uint32_t channel_count, audio_format_t format)
+{
+    // cannot overflow for reasonable channel_count
+    return channel_count * audio_bytes_per_sample(format);
+}
+
 /* converts device address to string sent to audio HAL via set_parameters */
 static inline char *audio_device_address_to_parameter(audio_devices_t device, const char *address)
 {
@@ -965,6 +1031,84 @@ typedef struct audio_uuid_s {
     uint8_t node[6];
 } audio_uuid_t;
 
+//TODO: audio_microphone_location_t need to move to HAL v4.0
+typedef enum {
+    AUDIO_MICROPHONE_LOCATION_UNKNOWN = 0,
+    AUDIO_MICROPHONE_LOCATION_MAINBODY = 1,
+    AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE = 2,
+    AUDIO_MICROPHONE_LOCATION_PERIPHERAL = 3,
+    AUDIO_MICROPHONE_LOCATION_CNT = 4,
+} audio_microphone_location_t;
+
+//TODO: audio_microphone_directionality_t need to move to HAL v4.0
+typedef enum {
+    AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN = 0,
+    AUDIO_MICROPHONE_DIRECTIONALITY_OMNI = 1,
+    AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL = 2,
+    AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID = 3,
+    AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID = 4,
+    AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID = 5,
+    AUDIO_MICROPHONE_DIRECTIONALITY_CNT = 6,
+} audio_microphone_directionality_t;
+
+/* A 3D point which could be used to represent geometric location
+ * or orientation of a microphone.
+ */
+struct audio_microphone_coordinate {
+    float x;
+    float y;
+    float z;
+};
+
+/* An number to indicate which group the microphone locate. Main body is
+ * usually group 0. Developer could use this value to group the microphones
+ * that locate on the same peripheral or attachments.
+ */
+typedef int audio_microphone_group_t;
+
+typedef enum {
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED = 0,
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT = 1,
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED = 2,
+    AUDIO_MICROPHONE_CHANNEL_MAPPING_CNT = 3,
+} audio_microphone_channel_mapping_t;
+
+/* the maximum length for the microphone id */
+#define AUDIO_MICROPHONE_ID_MAX_LEN 32
+/* max number of frequency responses in a frequency response table */
+#define AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES 256
+/* max number of microphone */
+#define AUDIO_MICROPHONE_MAX_COUNT 32
+/* the value of unknown spl */
+#define AUDIO_MICROPHONE_SPL_UNKNOWN -FLT_MAX
+/* the value of unknown sensitivity */
+#define AUDIO_MICROPHONE_SENSITIVITY_UNKNOWN -FLT_MAX
+/* the value of unknown coordinate */
+#define AUDIO_MICROPHONE_COORDINATE_UNKNOWN -FLT_MAX
+/* the value used as address when the address of bottom microphone is empty */
+#define AUDIO_BOTTOM_MICROPHONE_ADDRESS "bottom"
+/* the value used as address when the address of back microphone is empty */
+#define AUDIO_BACK_MICROPHONE_ADDRESS "back"
+
+struct audio_microphone_characteristic_t {
+    char                               device_id[AUDIO_MICROPHONE_ID_MAX_LEN];
+    audio_port_handle_t                id;
+    audio_devices_t                    device;
+    char                               address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+    audio_microphone_channel_mapping_t channel_mapping[AUDIO_CHANNEL_COUNT_MAX];
+    audio_microphone_location_t        location;
+    audio_microphone_group_t           group;
+    unsigned int                       index_in_the_group;
+    float                              sensitivity;
+    float                              max_spl;
+    float                              min_spl;
+    audio_microphone_directionality_t  directionality;
+    unsigned int                       num_frequency_responses;
+    float frequency_responses[2][AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES];
+    struct audio_microphone_coordinate geometric_location;
+    struct audio_microphone_coordinate orientation;
+};
+
 __END_DECLS
 
 /**
@@ -982,6 +1126,21 @@ __END_DECLS
 #define AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX "r_submix"
 #define AUDIO_HARDWARE_MODULE_ID_CODEC_OFFLOAD "codec_offload"
 #define AUDIO_HARDWARE_MODULE_ID_STUB "stub"
+#define AUDIO_HARDWARE_MODULE_ID_HEARING_AID "hearing_aid"
+
+/**
+ * Multi-Stream Decoder (MSD) HAL service name. MSD HAL is used to mix
+ * encoded streams together with PCM streams, producing re-encoded
+ * streams or PCM streams.
+ *
+ * The service must register itself using this name, and audioserver
+ * tries to instantiate a device factory using this name as well.
+ * Note that the HIDL implementation library file name *must* have the
+ * suffix "msd" in order to be picked up by HIDL that is:
+ *
+ *   android.hardware.audio@x.x-implmsd.so
+ */
+#define AUDIO_HAL_SERVICE_NAME_MSD "msd"
 
 /**
  * Parameter definitions.
@@ -1019,6 +1178,12 @@ __END_DECLS
 #define AUDIO_PARAMETER_STREAM_INPUT_SOURCE "input_source"   /* audio_source_t */
 #define AUDIO_PARAMETER_STREAM_SAMPLING_RATE "sampling_rate" /* uint32_t */
 
+/* Request the presentation id to be decoded by a next gen audio decoder */
+#define AUDIO_PARAMETER_STREAM_PRESENTATION_ID "presentation_id" /* int32_t */
+
+/* Request the program id to be decoded by a next gen audio decoder */
+#define AUDIO_PARAMETER_STREAM_PROGRAM_ID "program_id"           /* int32_t */
+
 #define AUDIO_PARAMETER_DEVICE_CONNECT "connect"            /* audio_devices_t */
 #define AUDIO_PARAMETER_DEVICE_DISCONNECT "disconnect"      /* audio_devices_t */
 
@@ -1057,5 +1222,4 @@ __END_DECLS
 #define AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES  "delay_samples"
 #define AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES  "padding_samples"
 
-
 #endif  // ANDROID_AUDIO_CORE_H
index 572b788..7a6a593 100644 (file)
@@ -1,9 +1,9 @@
 // This file is autogenerated by hidl-gen. Do not edit manually.
-// Source: android.hardware.audio.effect@2.0
+// Source: android.hardware.audio.effect@4.0
 // Root: android.hardware:hardware/interfaces
 
-#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EXPORTED_CONSTANTS_H_
-#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EXPORTED_CONSTANTS_H_
+#ifndef HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EXPORTED_CONSTANTS_H_
+#define HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EXPORTED_CONSTANTS_H_
 
 #ifdef __cplusplus
 extern "C" {
@@ -75,26 +75,25 @@ enum {
 
 typedef enum {
     EFFECT_BUFFER_ACCESS_WRITE = 0,
-    EFFECT_BUFFER_ACCESS_READ = 1,
-    EFFECT_BUFFER_ACCESS_ACCUMULATE = 2,
+    EFFECT_BUFFER_ACCESS_READ = 1, // (::android::hardware::audio::effect::V4_0::EffectBufferAccess.ACCESS_WRITE implicitly + 1)
+    EFFECT_BUFFER_ACCESS_ACCUMULATE = 2, // (::android::hardware::audio::effect::V4_0::EffectBufferAccess.ACCESS_READ implicitly + 1)
 } effect_buffer_access_e;
 
 enum {
-    EFFECT_CONFIG_BUFFER = 1, // 0x0001
-    EFFECT_CONFIG_SMP_RATE = 2, // 0x0002
-    EFFECT_CONFIG_CHANNELS = 4, // 0x0004
-    EFFECT_CONFIG_FORMAT = 8, // 0x0008
-    EFFECT_CONFIG_ACC_MODE = 16, // 0x0010
-    EFFECT_CONFIG_ALL = 31, // ((((BUFFER | SMP_RATE) | CHANNELS) | FORMAT) | ACC_MODE)
+    EFFECT_CONFIG_BUFFER = 1,
+    EFFECT_CONFIG_SMP_RATE = 2,
+    EFFECT_CONFIG_CHANNELS = 4,
+    EFFECT_CONFIG_FORMAT = 8,
+    EFFECT_CONFIG_ACC_MODE = 16,
 };
 
 typedef enum {
     EFFECT_FEATURE_AUX_CHANNELS = 0,
-    EFFECT_FEATURE_CNT = 1,
+    EFFECT_FEATURE_CNT = 1, // (::android::hardware::audio::effect::V4_0::EffectFeature.AUX_CHANNELS implicitly + 1)
 } effect_feature_e;
 
 #ifdef __cplusplus
 }
 #endif
 
-#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V2_0_EXPORTED_CONSTANTS_H_
+#endif  // HIDL_GENERATED_ANDROID_HARDWARE_AUDIO_EFFECT_V4_0_EXPORTED_CONSTANTS_H_
index d7bb1a9..4cdc773 100644 (file)
@@ -60,6 +60,12 @@ typedef struct effect_descriptor_s {
     char    implementor[EFFECT_STRING_LEN_MAX];    // human readable effect implementor name
 } effect_descriptor_t;
 
+#define EFFECT_CONFIG_ALL (EFFECT_CONFIG_BUFFER | \
+                           EFFECT_CONFIG_SMP_RATE | \
+                           EFFECT_CONFIG_CHANNELS | \
+                           EFFECT_CONFIG_FORMAT | \
+                           EFFECT_CONFIG_ACC_MODE)
+
 /////////////////////////////////////////////////
 //      Effect control interface
 /////////////////////////////////////////////////
@@ -429,6 +435,7 @@ typedef struct audio_buffer_s {
     size_t   frameCount;        // number of frames in buffer
     union {
         void*       raw;        // raw pointer to start of buffer
+        float*      f32;        // pointer to float 32 bit data at start of buffer
         int32_t*    s32;        // pointer to signed 32 bit data at start of buffer
         int16_t*    s16;        // pointer to signed 16 bit data at start of buffer
         uint8_t*    u8;         // pointer to unsigned 8 bit data at start of buffer
diff --git a/audio/include/system/audio_effects/effect_dynamicsprocessing.h b/audio/include/system/audio_effects/effect_dynamicsprocessing.h
new file mode 100644 (file)
index 0000000..346e830
--- /dev/null
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_EFFECT_DYNAMICSPROCESSING_CORE_H_
+#define ANDROID_EFFECT_DYNAMICSPROCESSING_CORE_H_
+
+#include <system/audio_effect.h>
+
+#if __cplusplus
+extern "C" {
+#endif
+
+#ifndef OPENSL_ES_H_
+static const effect_uuid_t SL_IID_DYNAMICSPROCESSING_ = { 0x7261676f, 0x6d75, 0x7369, 0x6364,
+        { 0x28, 0xe2, 0xfd, 0x3a, 0xc3, 0x9e } };
+const effect_uuid_t * const SL_IID_DYNAMICSPROCESSING = &SL_IID_DYNAMICSPROCESSING_;
+#endif //OPENSL_ES_H_
+
+/* enumerated parameters for dynamics processing effect */
+typedef enum
+{
+    DP_PARAM_GET_CHANNEL_COUNT = 0x10,
+    DP_PARAM_INPUT_GAIN = 0x20,
+    DP_PARAM_ENGINE_ARCHITECTURE = 0x30,
+    DP_PARAM_PRE_EQ = 0x40,
+    DP_PARAM_PRE_EQ_BAND = 0x45,
+    DP_PARAM_MBC = 0x50,
+    DP_PARAM_MBC_BAND = 0x55,
+    DP_PARAM_POST_EQ = 0x60,
+    DP_PARAM_POST_EQ_BAND = 0x65,
+    DP_PARAM_LIMITER = 0x70,
+} t_dynamicsprocessing_params;
+
+/* enumerated variants */
+typedef enum
+{
+    VARIANT_FAVOR_FREQUENCY_RESOLUTION = 0x00,
+    VARIANT_FAVOR_TIME_RESOLUTION = 0x01,
+} t_dynamicsprocessing_variants;
+
+#if __cplusplus
+} // extern "C"
+#endif
+
+
+#endif /*ANDROID_EFFECT_DYNAMICSPROCESSING_CORE_H_*/
index 2e2e90b..110b9c6 100644 (file)
@@ -67,6 +67,7 @@ typedef enum {
     AUDIO_POLICY_FORCE_FOR_SYSTEM,
     AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO,
     AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
+    AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING,
 
     AUDIO_POLICY_FORCE_USE_CNT,
     AUDIO_POLICY_FORCE_USE_MAX = AUDIO_POLICY_FORCE_USE_CNT - 1,
index c5156d9..6182ff3 100644 (file)
@@ -137,8 +137,9 @@ struct sound_trigger_generic_sound_model {
 
 /*
  * Generic recognition event sent via recognition callback
+ * Must be aligned to transmit as raw memory through Binder.
  */
-struct sound_trigger_recognition_event {
+struct __attribute__((aligned(8))) sound_trigger_recognition_event {
     int                              status;            /* recognition status e.g.
                                                            RECOGNITION_STATUS_SUCCESS */
     sound_trigger_sound_model_type_t type;              /* event type, same as sound model type.
diff --git a/audio_effects/include/audio_effects/effect_dynamicsprocessing.h b/audio_effects/include/audio_effects/effect_dynamicsprocessing.h
new file mode 100644 (file)
index 0000000..d350d73
--- /dev/null
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * USAGE NOTE: Only include this header when _implementing_ a particular
+ * effect. When access to UUID and properties is enough, include the
+ * corresponding header from system/audio_effects/, which doesn't include
+ * hardware/audio_effect.h.
+ *
+ * Only code that immediately calls into HAL or implements an effect
+ * can import hardware/audio_effect.h.
+ */
+
+#ifndef ANDROID_EFFECT_DYNAMICSPROCESSING_H_
+#define ANDROID_EFFECT_DYNAMICSPROCESSING_H_
+
+#include <hardware/audio_effect.h>
+#include <system/audio_effects/effect_dynamicsprocessing.h>
+
+#endif /*ANDROID_EFFECT_DYNAMICSPROCESSING_H_*/
index 123e7c6..700af61 100644 (file)
@@ -155,8 +155,14 @@ static void path_free(struct audio_route *ar)
     for (i = 0; i < ar->num_mixer_paths; i++) {
         free(ar->mixer_path[i].name);
         if (ar->mixer_path[i].setting) {
-            free(ar->mixer_path[i].setting->value.ptr);
+            size_t j;
+            for (j = 0; j < ar->mixer_path[i].length; j++) {
+                free(ar->mixer_path[i].setting[j].value.ptr);
+            }
             free(ar->mixer_path[i].setting);
+            ar->mixer_path[i].size = 0;
+            ar->mixer_path[i].length = 0;
+            ar->mixer_path[i].setting = NULL;
         }
     }
     free(ar->mixer_path);
@@ -476,12 +482,14 @@ static void start_tag(void *data, const XML_Char *tag_name,
             if (state->level == 1) {
                 /* top level path: create and stash the path */
                 state->path = path_create(ar, (char *)attr_name);
+                if (state->path == NULL)
+                    ALOGE("path created failed, please check the path if existed");
             } else {
                 /* nested path */
                 struct mixer_path *sub_path = path_get_by_name(ar, attr_name);
                 if (!sub_path) {
                     ALOGE("unable to find sub path '%s'", attr_name);
-                } else {
+                } else if (state->path != NULL) {
                     path_add_path(ar, state->path, sub_path);
                 }
             }
@@ -556,7 +564,8 @@ static void start_tag(void *data, const XML_Char *tag_name,
                 mixer_value.index = atoi((char *)attr_id);
             else
                 mixer_value.index = -1;
-            path_add_value(ar, state->path, &mixer_value);
+            if (state->path != NULL)
+                path_add_value(ar, state->path, &mixer_value);
         }
     }
 
@@ -775,7 +784,6 @@ int audio_route_reset_path(struct audio_route *ar, const char *name)
 static int audio_route_update_path(struct audio_route *ar, const char *name, bool reverse)
 {
     struct mixer_path *path;
-    int32_t i, end;
     unsigned int j;
 
     if (!ar) {
@@ -789,14 +797,12 @@ static int audio_route_update_path(struct audio_route *ar, const char *name, boo
         return -1;
     }
 
-    i = reverse ? (path->length - 1) : 0;
-    end = reverse ? -1 : (int32_t)path->length;
 
-    while (i != end) {
+    for (size_t i = 0; i < path->length; ++i) {
         unsigned int ctl_index;
         enum mixer_ctl_type type;
 
-        ctl_index = path->setting[i].ctl_index;
+        ctl_index = path->setting[reverse ? path->length - 1 - i : i].ctl_index;
 
         struct mixer_state * ms = &ar->mixer_state[ctl_index];
 
@@ -827,8 +833,6 @@ static int audio_route_update_path(struct audio_route *ar, const char *name, boo
                 break;
             }
         }
-
-        i = reverse ? (i - 1) : (i + 1);
     }
     return 0;
 }
index 91d18fd..83a74b8 100644 (file)
@@ -86,6 +86,40 @@ static inline uint8x3_t int32_to_uint8x3(int32_t in) {
     return num_out_samples * sizeof(*(out_buff)); \
 }
 
+/* Channel expands from an input buffer to an output buffer.
+ * See expand_selected_channels() function below for parameter definitions.
+ * Selected channels are replaced in the output buffer, with any extra channels
+ * per frame left alone.
+ *
+ * Move from back to front so that the conversion can be done in-place
+ * i.e. in_buff == out_buff
+ * NOTE: num_in_bytes must be a multiple of in_buff_channels * in_buff_sample_size.
+ */
+/* This is written as a C macro because it operates on generic types,
+ * which in a C++ file could be alternatively achieved by a "template"
+ * or an "auto" declaration.
+ * TODO: convert this from a C file to a C++ file.
+ */
+#define EXPAND_SELECTED_CHANNELS( \
+        in_buff, in_buff_chans, out_buff, out_buff_chans, num_in_bytes) \
+{ \
+    size_t num_in_samples = (num_in_bytes) / sizeof(*(in_buff)); \
+    size_t num_out_samples = (num_in_samples * (out_buff_chans)) / (in_buff_chans); \
+    typeof(out_buff) dst_ptr = (out_buff) + num_out_samples - 1; \
+    size_t src_index; \
+    typeof(in_buff) src_ptr = (in_buff) + num_in_samples - 1; \
+    size_t num_extra_chans = (out_buff_chans) - (in_buff_chans); \
+    for (src_index = 0; src_index < num_in_samples; src_index += (in_buff_chans)) { \
+        dst_ptr -= num_extra_chans; \
+        for (size_t dst_offset = num_extra_chans; dst_offset < (out_buff_chans); dst_offset++) { \
+            *dst_ptr-- = *src_ptr--; \
+        } \
+    } \
+    /* return number of *bytes* generated */ \
+    return num_out_samples * sizeof(*(out_buff)); \
+}
+
+
 /* Channel expands from a MONO input buffer to a MULTICHANNEL output buffer by duplicating the
  * single input channel to the first 2 output channels and 0-filling the remaining.
  * See expand_channels() function below for parameter definitions.
@@ -375,3 +409,75 @@ size_t adjust_channels(const void* in_buff, size_t in_buff_chans,
 
     return num_in_bytes;
 }
+
+/*
+ * Convert a buffer of N-channel, interleaved samples to M-channel
+ * (where N < M).
+ *   in_buff points to the buffer of samples
+ *   in_buff_channels Specifies the number of channels in the input buffer.
+ *   out_buff points to the buffer to receive converted samples.
+ *   out_buff_channels Specifies the number of channels in the output buffer.
+ *   sample_size_in_bytes Specifies the number of bytes per sample.
+ *   num_in_bytes size of input buffer in BYTES
+ * returns
+ *   the number of BYTES of output data.
+ * NOTE
+ *   channels > N are left alone in out_buff.
+ *   The out and in buffers must either be completely separate (non-overlapping), or
+ *   they must both start at the same address. Partially overlapping buffers are not supported.
+ */
+static size_t expand_selected_channels(const void* in_buff, size_t in_buff_chans,
+                              void* out_buff, size_t out_buff_chans,
+                              unsigned sample_size_in_bytes, size_t num_in_bytes)
+{
+    switch (sample_size_in_bytes) {
+    case 1:
+
+        EXPAND_SELECTED_CHANNELS((const uint8_t*)in_buff, in_buff_chans,
+                        (uint8_t*)out_buff, out_buff_chans,
+                        num_in_bytes);
+        // returns in macro
+
+    case 2:
+
+        EXPAND_SELECTED_CHANNELS((const int16_t*)in_buff, in_buff_chans,
+                        (int16_t*)out_buff, out_buff_chans,
+                        num_in_bytes);
+        // returns in macro
+
+    case 3:
+
+        EXPAND_SELECTED_CHANNELS((const uint8x3_t*)in_buff, in_buff_chans,
+                        (uint8x3_t*)out_buff, out_buff_chans,
+                        num_in_bytes);
+        // returns in macro
+
+    case 4:
+
+        EXPAND_SELECTED_CHANNELS((const int32_t*)in_buff, in_buff_chans,
+                        (int32_t*)out_buff, out_buff_chans,
+                        num_in_bytes);
+        // returns in macro
+
+    default:
+        return 0;
+    }
+}
+
+size_t adjust_selected_channels(const void* in_buff, size_t in_buff_chans,
+                       void* out_buff, size_t out_buff_chans,
+                       unsigned sample_size_in_bytes, size_t num_in_bytes)
+{
+    if (out_buff_chans > in_buff_chans) {
+        return expand_selected_channels(in_buff, in_buff_chans, out_buff, out_buff_chans,
+                               sample_size_in_bytes, num_in_bytes);
+    } else if (out_buff_chans < in_buff_chans) {
+        return contract_channels(in_buff, in_buff_chans, out_buff, out_buff_chans,
+                                 sample_size_in_bytes, num_in_bytes);
+    } else if (in_buff != out_buff) {
+        memcpy(out_buff, in_buff, num_in_bytes);
+    }
+
+    return num_in_bytes;
+}
+
index 1803e3c..50872fc 100644 (file)
@@ -34,7 +34,10 @@ void memcpy_by_audio_format(void *dst, audio_format_t dst_format,
         case AUDIO_FORMAT_PCM_24_BIT_PACKED:
         case AUDIO_FORMAT_PCM_32_BIT:
         case AUDIO_FORMAT_PCM_8_24_BIT:
-            memcpy(dst, src, count * audio_bytes_per_sample(dst_format));
+            if (dst != src) {
+                // TODO: should assert if memory regions overlap.
+                memcpy(dst, src, count * audio_bytes_per_sample(dst_format));
+            }
             return;
         default:
             break;
index 573cab7..10026f4 100644 (file)
@@ -45,6 +45,30 @@ size_t adjust_channels(const void* in_buff, size_t in_buff_chans,
                        void* out_buff, size_t out_buff_chans,
                        unsigned sample_size_in_bytes, size_t num_in_bytes);
 
+/**
+ * Expands or contracts sample data from one interleaved channel format to another.
+ * Extra expanded channels are left alone in the output buffer.
+ * Contracted channels are omitted from the end of each audio frame.
+ *
+ *   \param in_buff              points to the buffer of samples
+ *   \param in_buff_chans        Specifies the number of channels in the input buffer.
+ *   \param out_buff             points to the buffer to receive converted samples.
+ *   \param out_buff_chans       Specifies the number of channels in the output buffer.
+ *   \param sample_size_in_bytes Specifies the number of bytes per sample. 1, 2, 3, 4 are
+ *     currently valid.
+ *   \param num_in_bytes         size of input buffer in BYTES
+ *
+ * \return
+ *   the number of BYTES of output data or 0 if an error occurs.
+ *
+ * \note
+ *   The out and in buffers must either be completely separate (non-overlapping), or
+ *   they must both start at the same address. Partially overlapping buffers are not supported.
+ */
+size_t adjust_selected_channels(const void* in_buff, size_t in_buff_chans,
+                       void* out_buff, size_t out_buff_chans,
+                       unsigned sample_size_in_bytes, size_t num_in_bytes);
+
 /** \cond */
 __END_DECLS
 /** \endcond */
index 31bf1f6..b3298e7 100644 (file)
 #include <sys/time.h>
 #include <time.h>
 
+// These are declared as macros for compatibility with existing uses.
+// TODO Spell out the words in full.
+#define MICROS_PER_SECOND      1000000LL
+#define MILLIS_PER_SECOND         1000LL
+#define NANOS_PER_MICROSECOND     1000LL
+#define NANOS_PER_MILLISECOND  1000000LL
+#define NANOS_PER_SECOND    1000000000LL
+
 /**
  * \brief Converts time in ns to a time string, with format similar to logcat.
  * \param ns          input time in nanoseconds to convert.
index dfcfecd..842bbf0 100644 (file)
@@ -54,8 +54,8 @@ __BEGIN_DECLS
  * 2) Both dst_format and src_format are identical and of the list given
  * in (1). This is a straight copy.
  *
- * The destination and source buffers must be completely separate if the destination
- * format size is larger than the source format size. These routines call functions
+ * The destination and source buffers must be completely separate
+ * or point to the same starting buffer address. These routines call functions
  * in primitives.h, so descriptions of detailed behavior can be reviewed there.
  *
  * Logs a fatal error if dst or src format is not allowed by the conversion rules above.
index e0f952e..a3727d7 100644 (file)
@@ -17,6 +17,7 @@
 #ifndef ANDROID_AUDIO_PRIMITIVES_H
 #define ANDROID_AUDIO_PRIMITIVES_H
 
+#include <math.h>
 #include <stdint.h>
 #include <stdlib.h>
 #include <sys/cdefs.h>
@@ -34,6 +35,9 @@ __BEGIN_DECLS
  */
 
 /**
+ * Deprecated. Use memcpy_to_i16_from_q4_27() instead (double the pairs for the count).
+ * Neither this function nor memcpy_to_i16_from_q4_27() actually dither.
+ *
  * Dither and clamp pairs of 32-bit input samples (sums) to 16-bit output samples (out).
  * Each 32-bit input sample can be viewed as a signed fixed-point Q19.12 of which the
  * .12 fraction bits are dithered and the 19 integer bits are clamped to signed 16 bits.
@@ -41,15 +45,27 @@ __BEGIN_DECLS
  * is dithered and the remaining fraction is converted to the output Q.15, with clamping
  * on the 4 integer guard bits.
  *
- * For interleaved stereo, c is the number of sample pairs,
+ * For interleaved stereo, pairs is the number of sample pairs,
  * and out is an array of interleaved pairs of 16-bit samples per channel.
- * For mono, c is the number of samples / 2, and out is an array of 16-bit samples.
+ * For mono, pairs is the number of samples / 2, and out is an array of 16-bit samples.
  * The name "dither" is a misnomer; the current implementation does not actually dither
  * but uses truncation.  This may change.
  * The out and sums buffers must either be completely separate (non-overlapping), or
  * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
-void ditherAndClamp(int32_t* out, const int32_t *sums, size_t c);
+void ditherAndClamp(int32_t *out, const int32_t *sums, size_t pairs);
+
+/**
+ * Copy samples from signed fixed-point 32-bit Q4.27 to 16-bit Q0.15
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to copy
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void memcpy_to_i16_from_q4_27(int16_t *dst, const int32_t *src, size_t count);
 
 /**
  * Expand and copy samples from unsigned 8-bit offset by 0x80 to signed 16-bit.
@@ -141,7 +157,8 @@ void memcpy_to_float_from_q4_27(float *dst, const int32_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_float_from_i16(float *dst, const int16_t *src, size_t count);
 
@@ -154,7 +171,8 @@ void memcpy_to_float_from_i16(float *dst, const int16_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_float_from_u8(float *dst, const uint8_t *src, size_t count);
 
@@ -168,7 +186,8 @@ void memcpy_to_float_from_u8(float *dst, const uint8_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_float_from_p24(float *dst, const uint8_t *src, size_t count);
 
@@ -195,7 +214,8 @@ void memcpy_to_i16_from_p24(int16_t *dst, const uint8_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_i32_from_p24(int32_t *dst, const uint8_t *src, size_t count);
 
@@ -209,7 +229,8 @@ void memcpy_to_i32_from_p24(int32_t *dst, const uint8_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_p24_from_i16(uint8_t *dst, const int16_t *src, size_t count);
 
@@ -237,7 +258,8 @@ void memcpy_to_p24_from_float(uint8_t *dst, const float *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_p24_from_q8_23(uint8_t *dst, const int32_t *src, size_t count);
 
@@ -264,7 +286,8 @@ void memcpy_to_p24_from_i32(uint8_t *dst, const int32_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_q8_23_from_i16(int32_t *dst, const int16_t *src, size_t count);
 
@@ -291,7 +314,8 @@ void memcpy_to_q8_23_from_float_with_clamp(int32_t *dst, const float *src, size_
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_q8_23_from_p24(int32_t *dst, const uint8_t *src, size_t count);
 
@@ -347,7 +371,8 @@ void memcpy_to_float_from_q8_23(float *dst, const int32_t *src, size_t count);
  *  \param src     Source buffer
  *  \param count   Number of samples to copy
  *
- * The destination and source buffers must be completely separate.
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void memcpy_to_i32_from_i16(int32_t *dst, const int16_t *src, size_t count);
 
@@ -380,6 +405,22 @@ void memcpy_to_i32_from_float(int32_t *dst, const float *src, size_t count);
 void memcpy_to_float_from_i32(float *dst, const int32_t *src, size_t count);
 
 /**
+ * Copy samples from unrestricted float to range restricted float [-absMax, absMax].
+ * Any float sample not in the range [-absMax, absMax] will be clamped in this range.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to copy
+ *  \param absMax  Maximum of the absolute value of the copied samples.
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ * Note: NAN is clamped to absMax and not 0 for performance reasons (~2x faster).
+ */
+void memcpy_to_float_from_float_with_clamping(float *dst, const float *src, size_t count,
+                                              float absMax);
+
+/**
  * Downmix pairs of interleaved stereo input 16-bit samples to mono output 16-bit samples.
  *
  *  \param dst     Destination buffer
@@ -399,7 +440,8 @@ void downmix_to_mono_i16_from_stereo_i16(int16_t *dst, const int16_t *src, size_
  *  \param src     Source buffer
  *  \param count   Number of mono samples to upmix
  *
- * The destination and source buffers must be completely separate (non-overlapping).
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void upmix_to_stereo_i16_from_mono_i16(int16_t *dst, const int16_t *src, size_t count);
 
@@ -424,7 +466,8 @@ void downmix_to_mono_float_from_stereo_float(float *dst, const float *src, size_
  *  \param src     Source buffer
  *  \param count   Number of mono samples to upmix
  *
- * The destination and source buffers must be completely separate (non-overlapping).
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
  */
 void upmix_to_stereo_float_from_mono_float(float *dst, const float *src, size_t count);
 
@@ -578,6 +621,78 @@ size_t memcpy_by_index_array_initialization_dst_index(int8_t *idxary, size_t idx
         uint32_t dst_mask, uint32_t src_mask);
 
 /**
+ * Add and clamp signed 16-bit samples.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_i16(int16_t *dst, const int16_t *src, size_t count);
+
+/**
+ * Add and clamp unsigned 8-bit samples.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_u8(uint8_t *dst, const uint8_t *src, size_t count);
+
+/**
+ * Add and clamp packed 24-bit Q0.23 samples.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_p24(uint8_t *dst, const uint8_t *src, size_t count);
+
+/**
+ * Add and clamp 32-bit Q8.23 samples.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_q8_23(int32_t *dst, const int32_t *src, size_t count);
+
+/**
+ * Add and clamp signed 32-bit Q0.31 samples.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_i32(int32_t *dst, const int32_t *src, size_t count);
+
+/**
+ * Add float samples. Result is not clamped.
+ *
+ *  \param dst     Destination buffer
+ *  \param src     Source buffer
+ *  \param count   Number of samples to add
+ *
+ * The destination and source buffers must either be completely separate (non-overlapping), or
+ * they must both start at the same address.  Partially overlapping buffers are not supported.
+ */
+void accumulate_float(float *dst, const float *src, size_t count);
+
+/**
  * Clamp (aka hard limit or clip) a signed 32-bit sample to 16-bit range.
  */
 static inline int16_t clamp16(int32_t sample)
@@ -588,6 +703,16 @@ static inline int16_t clamp16(int32_t sample)
 }
 
 /**
+ * Clamp (aka hard limit or clip) a signed 64-bit sample to 32-bit range.
+ */
+static inline int32_t clamp32(int64_t sample)
+{
+    if ((sample>>31) ^ (sample>>63))
+        sample = 0x7fffffff ^ (sample>>63);
+    return sample;
+}
+
+/**
  * Convert a IEEE 754 single precision float [-1.0, 1.0) to int16_t [-32768, 32767]
  * with clamping.  Note the open bound at 1.0, values within 1/65536 of 1.0 map
  * to 32767 instead of 32768 (early clamping due to the smaller positive integer subrange).
@@ -597,10 +722,12 @@ static inline int16_t clamp16(int32_t sample)
  * depending on the sign bit inside NaN (whose representation is not unique).
  * Nevertheless, strictly speaking, NaN behavior should be considered undefined.
  *
- * Rounding of 0.5 lsb is to even (default for IEEE 754).
+ * OLD code disabled: Rounding of 0.5 lsb is to even (default for IEEE 754).
+ * NEW code enabled: Rounding of 0.5 lsb is away from 0.
  */
 static inline int16_t clamp16_from_float(float f)
 {
+#if 0
     /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
      * floating point significand. The normal shift is 3<<22, but the -15 offset
      * is used to multiply by 32768.
@@ -624,6 +751,10 @@ static inline int16_t clamp16_from_float(float f)
     else if (u.i > limpos)
         u.i = 32767;
     return u.i; /* Return lower 16 bits, the part of interest in the significand. */
+#else
+    static const float scale = 1 << 15;
+    return roundf(fmaxf(fminf(f * scale, scale - 1.f), -scale));
+#endif
 }
 
 /**
@@ -636,10 +767,12 @@ static inline int16_t clamp16_from_float(float f)
  * depending on the sign bit inside NaN (whose representation is not unique).
  * Nevertheless, strictly speaking, NaN behavior should be considered undefined.
  *
- * Rounding of 0.5 lsb is to even (default for IEEE 754).
+ * OLD code disabled: Rounding of 0.5 lsb is to even (default for IEEE 754).
+ * NEW code enabled: Rounding of 0.5 lsb is away from 0.
  */
 static inline uint8_t clamp8_from_float(float f)
 {
+#if 0
     /* Offset is used to expand the valid range of [-1.0, 1.0) into the 16 lsbs of the
      * floating point significand. The normal shift is 3<<22, but the -7 offset
      * is used to multiply by 128.
@@ -663,13 +796,17 @@ static inline uint8_t clamp8_from_float(float f)
     if (u.i > limpos)
         return 255;
     return u.i; /* Return lower 8 bits, the part of interest in the significand. */
+#else
+    return roundf(fmaxf(fminf(f * 128.f + 128.f, 255.f), 0.f));
+#endif
 }
 
 /**
  * Convert a single-precision floating point value to a Q0.23 integer value, stored in a
  * 32 bit signed integer (technically stored as Q8.23, but clamped to Q0.23).
  *
- * Rounds to nearest, ties away from 0.
+ * OLD code disabled: Rounds to nearest, ties away from 0.
+ * NEW code enabled: Rounding of 0.5 lsb is away from 0.
  *
  * Values outside the range [-1.0, 1.0) are properly clamped to -8388608 and 8388607,
  * including -Inf and +Inf. NaN values are considered undefined, and behavior may change
@@ -677,6 +814,7 @@ static inline uint8_t clamp8_from_float(float f)
  */
 static inline int32_t clamp24_from_float(float f)
 {
+#if 0
     static const float scale = (float)(1 << 23);
     static const float limpos = 0x7fffff / scale;
     static const float limneg = -0x800000 / scale;
@@ -691,6 +829,10 @@ static inline int32_t clamp24_from_float(float f)
      * ensure that we round to nearest, ties away from 0.
      */
     return f > 0 ? f + 0.5 : f - 0.5;
+#else
+    static const float scale = 1 << 23;
+    return roundf(fmaxf(fminf(f * scale, scale - 1.f), -scale));
+#endif
 }
 
 /**
diff --git a/audio_utils/include/audio_utils/string.h b/audio_utils/include/audio_utils/string.h
new file mode 100644 (file)
index 0000000..806fcf8
--- /dev/null
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_STRING_H
+#define ANDROID_AUDIO_STRING_H
+
+#include <string.h>
+
+/** similar to strlcpy but also zero fills to end of string buffer, ensures no data leak
+    in parceled data sent over binder.*/
+inline size_t audio_utils_strlcpy_zerofill(char *dst, const char *src, size_t dst_size) {
+    const size_t srclen = strlcpy(dst, src, dst_size);
+    const size_t srclen_with_zero = srclen + 1; /* include zero termination in length. */
+    if (srclen_with_zero < dst_size) {
+        const size_t num_zeroes = dst_size - srclen_with_zero;
+        memset(dst + srclen_with_zero, 0 /* value */, num_zeroes); /* clear remaining buffer */
+    }
+    return srclen;
+}
+
+#ifdef __cplusplus
+
+/** similar to audio_utils_strlcpy_zerofill for fixed size destination string. */
+template <size_t size>
+inline size_t audio_utils_strlcpy_zerofill(char (&dst)[size], const char *src) {
+    return audio_utils_strlcpy_zerofill(dst, src, size);
+}
+
+/** similar to strlcpy for fixed size destination string. */
+template <size_t size>
+inline size_t audio_utils_strlcpy(char (&dst)[size], const char *src) {
+    return strlcpy(dst, src, size);
+}
+
+#endif // __cplusplus
+
+#endif // !ANDROID_AUDIO_STRING_H
index f4bd645..594f1c5 100644 (file)
 #include <audio_utils/primitives.h>
 #include "private/private.h"
 
-void ditherAndClamp(int32_t* out, const int32_t *sums, size_t c)
+void ditherAndClamp(int32_t *out, const int32_t *sums, size_t pairs)
 {
-    size_t i;
-    for (i=0 ; i<c ; i++) {
-        int32_t l = *sums++;
-        int32_t r = *sums++;
-        int32_t nl = l >> 12;
-        int32_t nr = r >> 12;
-        l = clamp16(nl);
-        r = clamp16(nr);
-        *out++ = (r<<16) | (l & 0xFFFF);
+    for (; pairs > 0; --pairs) {
+        const int32_t l = clamp16(*sums++ >> 12);
+        const int32_t r = clamp16(*sums++ >> 12);
+        *out++ = (r << 16) | (l & 0xFFFF);
+    }
+}
+
+void memcpy_to_i16_from_q4_27(int16_t *dst, const int32_t *src, size_t count)
+{
+    for (; count > 0; --count) {
+        *dst++ = clamp16(*src++ >> 12);
     }
 }
 
@@ -36,71 +38,77 @@ void memcpy_to_i16_from_u8(int16_t *dst, const uint8_t *src, size_t count)
 {
     dst += count;
     src += count;
-    while (count--) {
+    for (; count > 0; --count) {
         *--dst = (int16_t)(*--src - 0x80) << 8;
     }
 }
 
 void memcpy_to_u8_from_i16(uint8_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = (*src++ >> 8) + 0x80;
     }
 }
 
 void memcpy_to_u8_from_float(uint8_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clamp8_from_float(*src++);
     }
 }
 
 void memcpy_to_i16_from_i32(int16_t *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = *src++ >> 16;
     }
 }
 
 void memcpy_to_i16_from_float(int16_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clamp16_from_float(*src++);
     }
 }
 
 void memcpy_to_float_from_q4_27(float *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = float_from_q4_27(*src++);
     }
 }
 
 void memcpy_to_float_from_i16(float *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
-        *dst++ = float_from_i16(*src++);
+    dst += count;
+    src += count;
+    for (; count > 0; --count) {
+        *--dst = float_from_i16(*--src);
     }
 }
 
 void memcpy_to_float_from_u8(float *dst, const uint8_t *src, size_t count)
 {
-    while (count--) {
-        *dst++ = float_from_u8(*src++);
+    dst += count;
+    src += count;
+    for (; count > 0; --count) {
+        *--dst = float_from_u8(*--src);
     }
 }
 
 void memcpy_to_float_from_p24(float *dst, const uint8_t *src, size_t count)
 {
-    while (count--) {
-        *dst++ = float_from_p24(src);
-        src += 3;
+    dst += count;
+    src += count * 3;
+    for (; count > 0; --count) {
+        src -= 3;
+        *--dst = float_from_p24(src);
     }
 }
 
 void memcpy_to_i16_from_p24(int16_t *dst, const uint8_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
 #if HAVE_BIG_ENDIAN
         *dst++ = src[1] | (src[0] << 8);
 #else
@@ -112,34 +120,40 @@ void memcpy_to_i16_from_p24(int16_t *dst, const uint8_t *src, size_t count)
 
 void memcpy_to_i32_from_p24(int32_t *dst, const uint8_t *src, size_t count)
 {
-    while (count--) {
+    dst += count;
+    src += count * 3;
+    for (; count > 0; --count) {
+        src -= 3;
 #if HAVE_BIG_ENDIAN
-        *dst++ = (src[2] << 8) | (src[1] << 16) | (src[0] << 24);
+        *--dst = (src[2] << 8) | (src[1] << 16) | (src[0] << 24);
 #else
-        *dst++ = (src[0] << 8) | (src[1] << 16) | (src[2] << 24);
+        *--dst = (src[0] << 8) | (src[1] << 16) | (src[2] << 24);
 #endif
-        src += 3;
     }
 }
 
 void memcpy_to_p24_from_i16(uint8_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
+    dst += count * 3;
+    src += count;
+    for (; count > 0; --count) {
+        dst -= 3;
+        const int16_t sample = *--src;
 #if HAVE_BIG_ENDIAN
-        *dst++ = *src >> 8;
-        *dst++ = *src++;
-        *dst++ = 0;
+        dst[0] = sample >> 8;
+        dst[1] = sample;
+        dst[2] = 0;
 #else
-        *dst++ = 0;
-        *dst++ = *src;
-        *dst++ = *src++ >> 8;
+        dst[0] = 0;
+        dst[1] = sample;
+        dst[2] = sample >> 8;
 #endif
     }
 }
 
 void memcpy_to_p24_from_float(uint8_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         int32_t ival = clamp24_from_float(*src++);
 
 #if HAVE_BIG_ENDIAN
@@ -156,7 +170,7 @@ void memcpy_to_p24_from_float(uint8_t *dst, const float *src, size_t count)
 
 void memcpy_to_p24_from_q8_23(uint8_t *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         int32_t ival = clamp24_from_q8_23(*src++);
 
 #if HAVE_BIG_ENDIAN
@@ -173,7 +187,7 @@ void memcpy_to_p24_from_q8_23(uint8_t *dst, const int32_t *src, size_t count)
 
 void memcpy_to_p24_from_i32(uint8_t *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         int32_t ival = *src++ >> 8;
 
 #if HAVE_BIG_ENDIAN
@@ -190,75 +204,93 @@ void memcpy_to_p24_from_i32(uint8_t *dst, const int32_t *src, size_t count)
 
 void memcpy_to_q8_23_from_i16(int32_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
-        *dst++ = (int32_t)*src++ << 8;
+    dst += count;
+    src += count;
+    for (; count > 0; --count) {
+        *--dst = (int32_t)*--src << 8;
     }
 }
 
 void memcpy_to_q8_23_from_float_with_clamp(int32_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clamp24_from_float(*src++);
     }
 }
 
 void memcpy_to_q8_23_from_p24(int32_t *dst, const uint8_t *src, size_t count)
 {
-    while (count--) {
+    dst += count;
+    src += count * 3;
+    for (; count > 0; --count) {
+        src -= 3;
 #if HAVE_BIG_ENDIAN
-        *dst++ = (int8_t)src[0] << 16 | src[1] << 8 | src[2];
+        *--dst = (int8_t)src[0] << 16 | src[1] << 8 | src[2];
 #else
-        *dst++ = (int8_t)src[2] << 16 | src[1] << 8 | src[0];
+        *--dst = (int8_t)src[2] << 16 | src[1] << 8 | src[0];
 #endif
-        src += 3;
     }
 }
 
 void memcpy_to_q4_27_from_float(int32_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clampq4_27_from_float(*src++);
     }
 }
 
 void memcpy_to_i16_from_q8_23(int16_t *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clamp16(*src++ >> 8);
     }
 }
 
 void memcpy_to_float_from_q8_23(float *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = float_from_q8_23(*src++);
     }
 }
 
 void memcpy_to_i32_from_i16(int32_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
-        *dst++ = (int32_t)*src++ << 16;
+    dst += count;
+    src += count;
+    for (; count > 0; --count) {
+        *--dst = (int32_t)*--src << 16;
     }
 }
 
 void memcpy_to_i32_from_float(int32_t *dst, const float *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = clamp32_from_float(*src++);
     }
 }
 
 void memcpy_to_float_from_i32(float *dst, const int32_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = float_from_i32(*src++);
     }
 }
 
+void memcpy_to_float_from_float_with_clamping(float *dst, const float *src, size_t count,
+                                              float absMax) {
+    // Note: using NEON intrinsics (vminq_f32, vld1q_f32...) did NOT accelerate
+    // the function when benchmarked. The compiler already vectorizes using FMINNM f32x4 & similar.
+    // Note: clamping induces a ~20% overhead compared to memcpy for count in [64, 512].
+    //       See primitives_benchmark
+    for (; count > 0; --count) {
+        const float sample = *src++;
+        *dst++ = fmax(-absMax, fmin(absMax, sample));
+    }
+}
+
 void downmix_to_mono_i16_from_stereo_i16(int16_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
+    for (; count > 0; --count) {
         *dst++ = (int16_t)(((int32_t)src[0] + (int32_t)src[1]) >> 1);
         src += 2;
     }
@@ -266,17 +298,19 @@ void downmix_to_mono_i16_from_stereo_i16(int16_t *dst, const int16_t *src, size_
 
 void upmix_to_stereo_i16_from_mono_i16(int16_t *dst, const int16_t *src, size_t count)
 {
-    while (count--) {
-        int32_t temp = *src++;
+    dst += count * 2;
+    src += count;
+    for (; count > 0; --count) {
+        const int32_t temp = *--src;
+        dst -= 2;
         dst[0] = temp;
         dst[1] = temp;
-        dst += 2;
     }
 }
 
 void downmix_to_mono_float_from_stereo_float(float *dst, const float *src, size_t frames)
 {
-    while (frames--) {
+    for (; frames > 0; --frames) {
         *dst++ = (src[0] + src[1]) * 0.5;
         src += 2;
     }
@@ -284,21 +318,21 @@ void downmix_to_mono_float_from_stereo_float(float *dst, const float *src, size_
 
 void upmix_to_stereo_float_from_mono_float(float *dst, const float *src, size_t frames)
 {
-    while (frames--) {
-        float temp = *src++;
+    dst += frames * 2;
+    src += frames;
+    for (; frames > 0; --frames) {
+        const float temp = *--src;
+        dst -= 2;
         dst[0] = temp;
         dst[1] = temp;
-        dst += 2;
     }
 }
 
 size_t nonZeroMono32(const int32_t *samples, size_t count)
 {
     size_t nonZero = 0;
-    while (count-- > 0) {
-        if (*samples++ != 0) {
-            nonZero++;
-        }
+    for (; count > 0; --count) {
+        nonZero += *samples++ != 0;
     }
     return nonZero;
 }
@@ -306,10 +340,8 @@ size_t nonZeroMono32(const int32_t *samples, size_t count)
 size_t nonZeroMono16(const int16_t *samples, size_t count)
 {
     size_t nonZero = 0;
-    while (count-- > 0) {
-        if (*samples++ != 0) {
-            nonZero++;
-        }
+    for (; count > 0; --count) {
+        nonZero += *samples++ != 0;
     }
     return nonZero;
 }
@@ -317,10 +349,8 @@ size_t nonZeroMono16(const int16_t *samples, size_t count)
 size_t nonZeroStereo32(const int32_t *frames, size_t count)
 {
     size_t nonZero = 0;
-    while (count-- > 0) {
-        if (frames[0] != 0 || frames[1] != 0) {
-            nonZero++;
-        }
+    for (; count > 0; --count) {
+        nonZero += frames[0] != 0 || frames[1] != 0;
         frames += 2;
     }
     return nonZero;
@@ -329,10 +359,8 @@ size_t nonZeroStereo32(const int32_t *frames, size_t count)
 size_t nonZeroStereo16(const int16_t *frames, size_t count)
 {
     size_t nonZero = 0;
-    while (count-- > 0) {
-        if (frames[0] != 0 || frames[1] != 0) {
-            nonZero++;
-        }
+    for (; count > 0; --count) {
+        nonZero += frames[0] != 0 || frames[1] != 0;
         frames += 2;
     }
     return nonZero;
@@ -345,7 +373,7 @@ size_t nonZeroStereo16(const int16_t *frames, size_t count)
 #define copy_frame_by_mask(dst, dmask, src, smask, count, zero) \
 { \
     uint32_t bit, ormask; \
-    while ((count)--) { \
+    for (; (count) > 0; --(count)) { \
         ormask = (dmask) | (smask); \
         while (ormask) { \
             bit = ormask & -ormask; /* get lowest bit */ \
@@ -417,7 +445,7 @@ void memcpy_by_channel_mask(void *dst, uint32_t dst_mask,
 { \
     unsigned i; \
     int index; \
-    while ((count)--) { \
+    for (; (count) > 0; --(count)) { \
         for (i = 0; i < (dst_channels); ++i) { \
             index = (idxary)[i]; \
             *(dst)++ = index < 0 ? (zero) : (src)[index]; \
@@ -524,3 +552,58 @@ size_t memcpy_by_index_array_initialization_dst_index(int8_t *idxary, size_t idx
     }
     return dst_idx;
 }
+
+void accumulate_i16(int16_t *dst, const int16_t *src, size_t count) {
+    while (count--) {
+        *dst = clamp16((int32_t)*dst + *src++);
+        ++dst;
+    }
+}
+
+void accumulate_u8(uint8_t *dst, const uint8_t *src, size_t count) {
+    int32_t sum;
+    for (; count > 0; --count) {
+        // 8-bit samples are centered around 0x80.
+        sum = *dst + *src++ - 0x80;
+        // Clamp to [0, 0xff].
+        *dst++ = (sum & 0x100) ? (~sum >> 9) : sum;
+    }
+}
+
+void accumulate_p24(uint8_t *dst, const uint8_t *src, size_t count) {
+    for (; count > 0; --count) {
+        // Unpack.
+        int32_t dst_q8_23 = 0;
+        int32_t src_q8_23 = 0;
+        memcpy_to_q8_23_from_p24(&dst_q8_23, dst, 1);
+        memcpy_to_q8_23_from_p24(&src_q8_23, src, 1);
+
+        // Accumulate and overwrite.
+        dst_q8_23 += src_q8_23;
+        memcpy_to_p24_from_q8_23(dst, &dst_q8_23, 1);
+
+        // Move on to next sample.
+        dst += 3;
+        src += 3;
+    }
+}
+
+void accumulate_q8_23(int32_t *dst, const int32_t *src, size_t count) {
+    for (; count > 0; --count) {
+        *dst = clamp24_from_q8_23(*dst + *src++);
+        ++dst;
+    }
+}
+
+void accumulate_i32(int32_t *dst, const int32_t *src, size_t count) {
+    for (; count > 0; --count) {
+        *dst = clamp32((int64_t)*dst + *src++);
+        ++dst;
+    }
+}
+
+void accumulate_float(float *dst, const float *src, size_t count) {
+    for (; count > 0; --count) {
+        *dst++ += *src++;
+    }
+}
index 0f13a1e..e0403aa 100644 (file)
@@ -24,6 +24,26 @@ cc_test {
 }
 
 cc_binary {
+    name: "primitives_benchmark",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+
+    srcs: ["primitives_benchmark.cpp"],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    static_libs: [
+        "libgoogle-benchmark",
+        "libaudioutils",
+    ],
+}
+
+cc_binary {
     name: "fifo_tests",
     host_supported: true,
     srcs: ["fifo_tests.cpp"],
@@ -170,3 +190,60 @@ cc_test {
         },
     }
 }
+
+cc_test {
+    name: "channels_tests",
+    host_supported: true,
+
+    shared_libs: [
+        "libcutils",
+        "liblog",
+    ],
+    srcs: ["channels_tests.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    target: {
+        android: {
+            shared_libs: ["libaudioutils"],
+        },
+        host: {
+            static_libs: ["libaudioutils"],
+        },
+    }
+}
+
+cc_test {
+    name: "string_tests",
+    host_supported: false,
+
+    shared_libs: ["libaudioutils"],
+    srcs: ["string_tests.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+}
+
+cc_test {
+    name: "format_tests",
+    host_supported: true,
+
+    shared_libs: [
+        "liblog",
+    ],
+    srcs: ["format_tests.cpp"],
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+    target: {
+        android: {
+            shared_libs: ["libaudioutils"],
+        },
+        host: {
+            static_libs: ["libaudioutils"],
+        },
+    }
+}
index 401847d..78de692 100755 (executable)
@@ -26,3 +26,19 @@ adb shell /system/bin/primitives_tests
 echo "testing power"
 adb push $OUT/data/nativetest/power_tests/power_tests /system/bin
 adb shell /system/bin/power_tests
+
+echo "testing channels"
+adb push $OUT/data/nativetest/channels_tests/channels_tests /system/bin
+adb shell /system/bin/channels_tests
+
+echo "string test"
+adb push $OUT/data/nativetest/string_tests/string_tests /system/bin
+adb shell /system/bin/string_tests
+
+echo "format tests"
+adb push $OUT/data/nativetest/format_tests/format_tests /system/bin
+adb shell /system/bin/format_tests
+
+echo "benchmarking primitives"
+adb push $OUT/system/bin/primitives_benchmark /system/bin
+adb shell /system/bin/primitives_benchmark
diff --git a/audio_utils/tests/channels_tests.cpp b/audio_utils/tests/channels_tests.cpp
new file mode 100644 (file)
index 0000000..eb3b5e0
--- /dev/null
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_channels_tests"
+
+#include <math.h>
+#include <vector>
+
+#include <gtest/gtest.h>
+#include <log/log.h>
+
+#include <audio_utils/channels.h>
+
+// TODO: Make a common include file for helper functions.
+
+template<typename T>
+void checkMonotone(const T *ary, size_t size)
+{
+    for (size_t i = 1; i < size; ++i) {
+        EXPECT_LT(ary[i-1], ary[i]);
+    }
+}
+
+template<typename T>
+void checkUnsignedMonotoneOrZero(const T *ary, size_t size)
+{
+    if (size == 0) return;
+
+    T least = ary[0];
+    for (size_t i = 1; i < size; ++i) {
+        if (ary[i]) {
+            EXPECT_LT(least, ary[i]);
+            least = ary[i];
+        }
+    }
+}
+
+template<typename T>
+void expectEq(const T &c1, const T &c2) {
+    EXPECT_EQ(c1.size(), c2.size());
+    EXPECT_EQ(0, memcmp(c1.data(), c2.data(), sizeof(c1[0]) * std::min(c1.size(), c2.size())));
+}
+
+TEST(audio_utils_channels, adjust_channels) {
+    constexpr size_t size = 65536;
+    std::vector<uint16_t> u16ref(size);
+    std::vector<uint16_t> u16expand(size * 2);
+    std::vector<uint16_t> u16ary(size);
+
+    // reference buffer is monotonic.
+    for (size_t i = 0; i < u16ref.size(); ++i) {
+        u16ref[i] = i;
+    }
+
+    // expand channels from stereo to quad.
+    adjust_channels(
+            u16ref.data() /*in_buff*/,
+            2 /*in_channels*/,
+            u16expand.data() /*out_buff*/,
+            4 /*out_channels*/,
+            sizeof(u16ref[0]) /*sample_size_in_bytes*/,
+            sizeof(u16ref[0]) * u16ref.size() /*num_in_bytes*/);
+
+    // expanded buffer must increase (or be zero).
+    checkUnsignedMonotoneOrZero(u16expand.data(), u16expand.size());
+
+    // contract channels back to stereo.
+    adjust_channels(
+            u16expand.data() /*in_buff*/,
+            4 /*in_channels*/,
+            u16ary.data() /*out_buff*/,
+            2 /*out_channels*/,
+            sizeof(u16expand[0]) /*sample_size_in_bytes*/,
+            sizeof(u16expand[0]) * u16expand.size() /*num_in_bytes*/);
+
+    // contracted array must be identical to original.
+    expectEq(u16ary, u16ref);
+}
+
+TEST(audio_utils_channels, adjust_selected_channels) {
+    constexpr size_t size = 65536;
+    std::vector<uint16_t> u16ref(size);
+    std::vector<uint16_t> u16contract(size / 2);
+    std::vector<uint16_t> u16ary(size);
+
+    // reference buffer is monotonic.
+    for (size_t i = 0; i < u16ref.size(); ++i) {
+        u16ref[i] = i;
+    }
+
+    // contract from quad to stereo.
+    adjust_selected_channels(
+            u16ref.data() /*in_buff*/,
+            4 /*in_channels*/,
+            u16contract.data() /*out_buff*/,
+            2 /*out_channels*/,
+            sizeof(u16ref[0]) /*sample_size_in_bytes*/,
+            sizeof(u16ref[0]) * u16ref.size() /*num_in_bytes*/);
+
+    // contracted buffer must increase.
+    checkMonotone(u16contract.data(), u16contract.size());
+
+    // initialize channels 3 and 4 of final comparison array.
+    for (size_t i = 0; i < u16ary.size() / 4; ++i) {
+        u16ary[i * 4 + 2] = u16ref[i * 4 + 2];
+        u16ary[i * 4 + 3] = u16ref[i * 4 + 3];
+    }
+
+    // expand stereo into channels 1 and 2 of quad comparison array.
+    adjust_selected_channels(
+            u16contract.data() /*in_buff*/,
+            2 /*in_channels*/,
+            u16ary.data() /*out_buff*/,
+            4 /*out_channels*/,
+            sizeof(u16contract[0]) /*sample_size_in_bytes*/,
+            sizeof(u16contract[0]) * u16contract.size() /*num_in_bytes*/);
+
+    // comparison array must be identical to original.
+    expectEq(u16ary, u16ref);
+}
+
+
index f9c72e2..212bf78 100644 (file)
  * limitations under the License.
  */
 
-#include <new>
+#include <errno.h>
 #include <stdio.h>
+#include <string>
 #include <sys/mman.h>
 #include <sys/types.h>
 #include <sys/wait.h>
 #include <unistd.h>
 
+#include <new>
+
 #include <audio_utils/fifo.h>
 #include <cutils/ashmem.h>
 
diff --git a/audio_utils/tests/format_tests.cpp b/audio_utils/tests/format_tests.cpp
new file mode 100644 (file)
index 0000000..0526b30
--- /dev/null
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_format_tests"
+#include <log/log.h>
+
+#include <audio_utils/format.h>
+#include <gtest/gtest.h>
+
+/** returns true if the format is a common source or destination format.
+    memcpy_by_audio_format() allows interchange between any PCM format and the
+    "common" PCM 16 bit and PCM float formats. */
+static bool is_common_format(audio_format_t format) {
+    return format == AUDIO_FORMAT_PCM_16_BIT || format == AUDIO_FORMAT_PCM_FLOAT;
+}
+
+// Initialize PCM 16 bit ramp for basic data sanity check (generated from PCM 8 bit data).
+// TODO: consider creating fillPseudoRandomValue().
+template<size_t size>
+static void fillRamp(int16_t(&buffer)[size])
+{
+    // Create PCM 16 bit data based on PCM 8 bit format because PCM 8 bit is convertible
+    // to all other audio formats without loss; hence, round trip conversion preserves equality.
+    uint8_t bytes[size];
+    for (size_t i = 0; i < size; ++i) {
+        bytes[i] = i;
+    }
+    // convert to PCM 16 bit
+    memcpy_by_audio_format(
+            buffer, AUDIO_FORMAT_PCM_16_BIT,
+            bytes, AUDIO_FORMAT_PCM_8_BIT, size);
+
+    uint8_t check[size];
+    memcpy_by_audio_format(
+            check, AUDIO_FORMAT_PCM_8_BIT,
+            buffer, AUDIO_FORMAT_PCM_16_BIT, size);
+    EXPECT_EQ(0, memcmp(check, bytes, size));
+}
+
+class FormatTest : public testing::TestWithParam<std::tuple<audio_format_t, audio_format_t>>
+{
+};
+
+TEST_P(FormatTest, memcpy_by_audio_format)
+{
+    // fetch parameters
+    const auto param = GetParam();
+    const audio_format_t src_encoding = std::get<0>(param);
+    const audio_format_t dst_encoding = std::get<1>(param);
+
+    // either source or destination (or both) need to be a common format
+    if (!is_common_format(src_encoding) && !is_common_format(dst_encoding)) {
+        printf("skip conversion src:%#x  dst:%#x\n", src_encoding, dst_encoding);
+        return;
+    }
+
+    constexpr size_t SAMPLES = UINT8_MAX;
+    constexpr audio_format_t orig_encoding = AUDIO_FORMAT_PCM_16_BIT;
+    int16_t orig_data[SAMPLES];
+
+    fillRamp(orig_data);
+
+    // data buffer for in-place conversion (uint32_t is maximum sample size of 4 bytes)
+    uint32_t data[SAMPLES];
+    // check buffer is used to compare out-of-place vs in-place conversion.
+    uint32_t check[SAMPLES];
+
+    printf("trying conversion src:%#x  dst:%#x\n", src_encoding, dst_encoding);
+    fflush(stdout);
+    // Copy original data to data buffer at src_encoding.
+    memcpy_by_audio_format(
+            data, src_encoding,
+            orig_data, orig_encoding, SAMPLES);
+
+    // Convert from src encoding to dst encoding.
+    memcpy_by_audio_format(
+            check, dst_encoding,
+            data, src_encoding, SAMPLES);
+
+    // Check in-place is same as out-of-place conversion.
+    memcpy_by_audio_format(
+            data, dst_encoding,
+            data, src_encoding, SAMPLES);
+    EXPECT_EQ(0, memcmp(check, data, SAMPLES * audio_bytes_per_sample(dst_encoding)));
+
+    // Go back to the original data encoding for comparison.
+    memcpy_by_audio_format(
+            data, orig_encoding,
+            data, dst_encoding, SAMPLES);
+
+    // Raw byte compare at the original encoding must succeed - our conversions
+    // must be lossless for PCM 8 bit representation which orig_data was constructed from.
+    EXPECT_EQ(0,
+            memcmp(data, orig_data, SAMPLES * audio_bytes_per_sample(orig_encoding)));
+}
+
+INSTANTIATE_TEST_CASE_P(FormatVariations, FormatTest, ::testing::Combine(
+    ::testing::Values(
+        AUDIO_FORMAT_PCM_8_BIT,
+        AUDIO_FORMAT_PCM_16_BIT,
+        AUDIO_FORMAT_PCM_FLOAT,
+        AUDIO_FORMAT_PCM_24_BIT_PACKED,
+        AUDIO_FORMAT_PCM_32_BIT,
+        AUDIO_FORMAT_PCM_8_24_BIT
+    ),
+    ::testing::Values(
+        AUDIO_FORMAT_PCM_8_BIT,
+        AUDIO_FORMAT_PCM_16_BIT,
+        AUDIO_FORMAT_PCM_FLOAT,
+        AUDIO_FORMAT_PCM_24_BIT_PACKED,
+        AUDIO_FORMAT_PCM_32_BIT,
+        AUDIO_FORMAT_PCM_8_24_BIT
+    )));
diff --git a/audio_utils/tests/primitives_benchmark.cpp b/audio_utils/tests/primitives_benchmark.cpp
new file mode 100644 (file)
index 0000000..ac00b1f
--- /dev/null
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstddef>
+#include <random>
+#include <vector>
+
+#include <benchmark/benchmark.h>
+
+#include <audio_utils/primitives.h>
+
+static void BM_MemcpyToFloatFromFloatWithClamping(benchmark::State& state) {
+    const size_t count = state.range(0);
+    const float srcMax = state.range(1);
+    const float absMax = 1.413;
+
+    std::vector<float> src(count);
+    std::vector<float> dst(count);
+    std::vector<float> expected(count);
+
+    // Initialize src buffer with deterministic pseudo-random values
+    std::minstd_rand gen(count);
+    std::uniform_real_distribution<> dis(-srcMax, srcMax);
+    for (size_t i = 0; i < count; i++) {
+        src[i] = dis(gen);
+        expected[i] = fmin(absMax, fmax(-absMax, src[i]));
+    }
+
+    // Run the test
+    while (state.KeepRunning()) {
+        benchmark::DoNotOptimize(src.data());
+        benchmark::DoNotOptimize(dst.data());
+        memcpy_to_float_from_float_with_clamping(dst.data(), src.data(), count, absMax);
+        benchmark::ClobberMemory();
+    }
+
+    if (expected != dst) {
+        state.SkipWithError("Incorrect clamping!");
+    }
+    state.SetComplexityN(state.range(0));
+}
+
+BENCHMARK(BM_MemcpyToFloatFromFloatWithClamping)->RangeMultiplier(2)->Ranges({{10, 8<<12}, {1, 2}});
+
+static void BM_MemcpyFloat(benchmark::State& state) {
+    const size_t count = state.range(0);
+
+    std::vector<float> src(count);
+    std::vector<float> dst(count);
+
+    // Initialize src buffer with deterministic pseudo-random values
+    std::minstd_rand gen(count);
+    std::uniform_real_distribution<> dis;
+    for (size_t i = 0; i < count; i++) {
+        src[i] = dis(gen);
+    }
+
+    // Run the test
+    while (state.KeepRunning()) {
+        benchmark::DoNotOptimize(src.data());
+        benchmark::DoNotOptimize(dst.data());
+        memcpy(dst.data(), src.data(), count * sizeof(float));
+        benchmark::ClobberMemory();
+    }
+
+    if (src != dst) {
+        state.SkipWithError("Incorrect memcpy!");
+    }
+    state.SetComplexityN(state.range(0));
+}
+
+BENCHMARK(BM_MemcpyFloat)->RangeMultiplier(2)->Ranges({{10, 8<<12}});
+
+static void BM_MemcpyToFloatFromI16(benchmark::State& state) {
+    const size_t count = state.range(0);
+
+    std::vector<int16_t> src(count);
+    std::vector<float> dst(count);
+
+    // Initialize src buffer with deterministic pseudo-random values
+    std::minstd_rand gen(count);
+    std::uniform_int_distribution<> dis(INT16_MIN, INT16_MAX);
+    for (size_t i = 0; i < count; i++) {
+        src[i] = dis(gen);
+    }
+
+    // Run the test
+    while (state.KeepRunning()) {
+        benchmark::DoNotOptimize(src.data());
+        benchmark::DoNotOptimize(dst.data());
+        memcpy_to_float_from_i16(dst.data(), src.data(), count);
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+}
+
+BENCHMARK(BM_MemcpyToFloatFromI16)->RangeMultiplier(2)->Ranges({{10, 8<<12}});
+
+
+static void BM_MemcpyToI16FromFloat(benchmark::State& state) {
+    const size_t count = state.range(0);
+
+    std::vector<float> src(count);
+    std::vector<int16_t> dst(count);
+
+    // Initialize src buffer with deterministic pseudo-random values
+    std::minstd_rand gen(count);
+    std::uniform_real_distribution<> dis;
+    for (size_t i = 0; i < count; i++) {
+        src[i] = dis(gen);
+    }
+
+    // Run the test
+    while (state.KeepRunning()) {
+        benchmark::DoNotOptimize(src.data());
+        benchmark::DoNotOptimize(dst.data());
+        memcpy_to_i16_from_float(dst.data(), src.data(), count);
+        benchmark::ClobberMemory();
+    }
+
+    state.SetComplexityN(state.range(0));
+}
+
+BENCHMARK(BM_MemcpyToI16FromFloat)->RangeMultiplier(2)->Ranges({{10, 8<<12}});
+
+BENCHMARK_MAIN();
index 44ba6b8..d28516a 100644 (file)
@@ -35,6 +35,15 @@ static const int32_t lim16pos = (1 << 15) - 1;
 static const int32_t lim16neg = -(1 << 15);
 static const int32_t lim24pos = (1 << 23) - 1;
 static const int32_t lim24neg = -(1 << 23);
+static const int64_t lim32pos = 0x000000007fffffff;
+static const int64_t lim32neg = 0xffffffff80000000;
+
+// Use memset here since it is generally the fastest method of clearing data,
+// but could be changed to std::fill or assignment should those prove faster.
+template <typename T>
+static void zeroFill(T &container) {
+    memset(container.data(), 0, container.size() * sizeof(container[0]));
+}
 
 inline void testClamp8(float f)
 {
@@ -187,30 +196,33 @@ TEST(audio_utils_primitives, clamp_to_int) {
 
 TEST(audio_utils_primitives, memcpy) {
     // test round-trip.
-    int16_t *i16ref = new int16_t[65536];
-    int16_t *i16ary = new int16_t[65536];
-    int32_t *i32ary = new int32_t[65536];
-    float *fary = new float[65536];
-    uint8_t *pary = new uint8_t[65536*3];
+    constexpr size_t size = 65536;
+    std::vector<int16_t> i16ref(size);
+    std::vector<int16_t> i16ary(size);
+    std::vector<int32_t> i32ary(size);
+    std::vector<float> fary(size);
+    std::vector<uint8_t> pary(size * 3);
 
-    for (size_t i = 0; i < 65536; ++i) {
+
+    // set signed reference monotonic array from -32768 to 32767
+    for (size_t i = 0; i < i16ref.size(); ++i) {
         i16ref[i] = i16ary[i] = i - 32768;
     }
 
     // do round-trip testing i16 and float
-    memcpy_to_float_from_i16(fary, i16ary, 65536);
-    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
-    checkMonotone(fary, 65536);
+    memcpy_to_float_from_i16(fary.data(), i16ary.data(), fary.size());
+    zeroFill(i16ary);
+    checkMonotone(fary.data(), fary.size());
 
-    memcpy_to_i16_from_float(i16ary, fary, 65536);
-    memset(fary, 0, 65536 * sizeof(fary[0]));
-    checkMonotone(i16ary, 65536);
+    memcpy_to_i16_from_float(i16ary.data(), fary.data(), i16ary.size());
+    zeroFill(fary);
+    checkMonotone(i16ary.data(), i16ary.size());
 
     // TODO make a template case for the following?
 
     // do round-trip testing p24 to i16 and float
-    memcpy_to_p24_from_i16(pary, i16ary, 65536);
-    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
+    memcpy_to_p24_from_i16(pary.data(), i16ary.data(), size /* note pary elem is 3 bytes */);
+    zeroFill(i16ary);
 
     // check an intermediate format at a position(???)
 #if 0
@@ -218,114 +230,121 @@ TEST(audio_utils_primitives, memcpy) {
             1025, (unsigned) pary[1025*3],
             1025, (unsigned) pary[1025*3+1],
             1025, (unsigned) pary[1025*3+2]
-            );
+    );
 #endif
 
-    memcpy_to_float_from_p24(fary, pary, 65536);
-    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
-    checkMonotone(fary, 65536);
+    memcpy_to_float_from_p24(fary.data(), pary.data(), fary.size());
+    zeroFill(pary);
+    checkMonotone(fary.data(), fary.size());
 
-    memcpy_to_p24_from_float(pary, fary, 65536);
-    memset(fary, 0, 65536 * sizeof(fary[0]));
+    memcpy_to_p24_from_float(pary.data(), fary.data(), size /* note pary elem is 3 bytes */);
+    zeroFill(fary);
+    checkMonotonep24(pary.data(), pary.size() /* this is * 3*/);
 
-    memcpy_to_i16_from_p24(i16ary, pary, 65536);
-    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
-    checkMonotone(i16ary, 65536);
+    memcpy_to_i16_from_p24(i16ary.data(), pary.data(), i16ary.size());
+    zeroFill(pary);
+    checkMonotone(i16ary.data(), i16ary.size());
 
     // do round-trip testing q8_23 to i16 and float
-    memcpy_to_q8_23_from_i16(i32ary, i16ary, 65536);
-    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_q8_23_from_i16(i32ary.data(), i16ary.data(), i32ary.size());
+    zeroFill(i16ary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_float_from_q8_23(fary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotone(fary, 65536);
+    memcpy_to_float_from_q8_23(fary.data(), i32ary.data(), fary.size());
+    zeroFill(i32ary);
+    checkMonotone(fary.data(), fary.size());
 
-    memcpy_to_q8_23_from_float_with_clamp(i32ary, fary, 65536);
-    memset(fary, 0, 65536 * sizeof(fary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_q8_23_from_float_with_clamp(i32ary.data(), fary.data(), i32ary.size());
+    zeroFill(fary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_i16_from_q8_23(i16ary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotone(i16ary, 65536);
+    memcpy_to_i16_from_q8_23(i16ary.data(), i32ary.data(), i16ary.size());
+    zeroFill(i32ary);
+    checkMonotone(i16ary.data(), i16ary.size());
 
     // do round-trip testing i32 to i16 and float
-    memcpy_to_i32_from_i16(i32ary, i16ary, 65536);
-    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_i32_from_i16(i32ary.data(), i16ary.data(), i32ary.size());
+    zeroFill(i16ary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_float_from_i32(fary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotone(fary, 65536);
+    memcpy_to_float_from_i32(fary.data(), i32ary.data(), fary.size());
+    zeroFill(i32ary);
+    checkMonotone(fary.data(), fary.size());
 
-    memcpy_to_i32_from_float(i32ary, fary, 65536);
-    memset(fary, 0, 65536 * sizeof(fary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_i32_from_float(i32ary.data(), fary.data(), i32ary.size());
+    zeroFill(fary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_i16_from_i32(i16ary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotone(i16ary, 65536);
+    memcpy_to_i16_from_i32(i16ary.data(), i32ary.data(), i16ary.size());
+    zeroFill(i32ary);
+    checkMonotone(i16ary.data(), i16ary.size());
 
     // do round-trip test i16 -> p24 -> i32 -> p24 -> q8_23 -> p24 -> i16
-    memcpy_to_p24_from_i16(pary, i16ary, 65536);
-    memset(i16ary, 0, 65536 * sizeof(i16ary[0]));
-    checkMonotonep24(pary, 65536 * 3);
+    memcpy_to_p24_from_i16(pary.data(), i16ary.data(), size /* note pary elem is 3 bytes */);
+    zeroFill(i16ary);
+    checkMonotonep24(pary.data(), pary.size() /* this is * 3*/);
 
-    memcpy_to_i32_from_p24(i32ary, pary, 65536);
-    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_i32_from_p24(i32ary.data(), pary.data(), i32ary.size());
+    zeroFill(pary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_p24_from_i32(pary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotonep24(pary, 65536 * 3);
+    memcpy_to_p24_from_i32(pary.data(), i32ary.data(), size /* note pary elem is 3 bytes */);
+    zeroFill(i32ary);
+    checkMonotonep24(pary.data(), pary.size() /* this is * 3*/);
 
-    memcpy_to_q8_23_from_p24(i32ary, pary, 65536);
-    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_q8_23_from_p24(i32ary.data(), pary.data(), i32ary.size());
+    zeroFill(pary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_p24_from_q8_23(pary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotonep24(pary, 65536 * 3);
+    memcpy_to_p24_from_q8_23(pary.data(), i32ary.data(), size /* note pary elem is 3 bytes */);
+    zeroFill(i32ary);
+    checkMonotonep24(pary.data(), pary.size() /* this is * 3*/);
 
-    memcpy_to_i16_from_p24(i16ary, pary, 65536);
-    memset(pary, 0, 65536 * 3 * sizeof(pary[0]));
-    checkMonotone(i16ary, 65536);
+    memcpy_to_i16_from_p24(i16ary.data(), pary.data(), i16ary.size());
+    zeroFill(pary);
+    checkMonotone(i16ary.data(), i16ary.size());
 
     // do partial round-trip testing q4_27 to i16 and float
-    memcpy_to_float_from_i16(fary, i16ary, 65536);
-    //memset(i16ary, 0, 65536 * sizeof(i16ary[0])); // not cleared: we don't do full roundtrip
+    memcpy_to_float_from_i16(fary.data(), i16ary.data(), fary.size());
+    zeroFill(i16ary);
 
-    memcpy_to_q4_27_from_float(i32ary, fary, 65536);
-    memset(fary, 0, 65536 * sizeof(fary[0]));
-    checkMonotone(i32ary, 65536);
+    memcpy_to_q4_27_from_float(i32ary.data(), fary.data(), i32ary.size());
+    zeroFill(fary);
+    checkMonotone(i32ary.data(), i32ary.size());
 
-    memcpy_to_float_from_q4_27(fary, i32ary, 65536);
-    memset(i32ary, 0, 65536 * sizeof(i32ary[0]));
-    checkMonotone(fary, 65536);
+    memcpy_to_i16_from_q4_27(i16ary.data(), i32ary.data(), i16ary.size());
+    checkMonotone(i16ary.data(), i16ary.size());
+    EXPECT_EQ(0, memcmp(i16ary.data(), i16ref.data(), i16ary.size() * sizeof(i16ary[0])));
+
+    zeroFill(i16ary);
+
+    // ditherAndClamp() has non-standard parameters - memcpy_to_float_from_q4_27() is preferred
+    ditherAndClamp(reinterpret_cast<int32_t *>(i16ary.data()),
+            i32ary.data(), i16ary.size() / 2);
+    checkMonotone(i16ary.data(), i16ary.size());
+    EXPECT_EQ(0, memcmp(i16ary.data(), i16ref.data(), i16ary.size() * sizeof(i16ary[0])));
+
+    memcpy_to_float_from_q4_27(fary.data(), i32ary.data(), fary.size());
+    zeroFill(i32ary);
+    checkMonotone(fary.data(), fary.size());
 
     // at the end, our i16ary must be the same. (Monotone should be equivalent to this)
-    EXPECT_EQ(0, memcmp(i16ary, i16ref, 65536*sizeof(i16ary[0])));
+    EXPECT_EQ(0, memcmp(i16ary.data(), i16ref.data(), i16ary.size() * sizeof(i16ary[0])));
 
     // test round-trip for u8 and float.
-    uint8_t *u8ref = new uint8_t[256];
-    uint8_t *u8ary = new uint8_t[256];
+    constexpr size_t u8size = 256;
+    std::vector<uint8_t> u8ref(u8size);
+    std::vector<uint8_t> u8ary(u8size);
 
-    for (unsigned i = 0; i < 256; ++i) {
+    for (size_t i = 0; i < u8ref.size(); ++i) {
         u8ref[i] = i;
     }
 
-    memcpy_to_float_from_u8(fary, u8ref, 256);
-    memcpy_to_u8_from_float(u8ary, fary, 256);
-
-    EXPECT_EQ(0, memcmp(u8ary, u8ref, 256 * sizeof(u8ary[0])));
+    constexpr size_t testsize = std::min(u8size, size);
+    memcpy_to_float_from_u8(fary.data(), u8ref.data(), testsize);
+    memcpy_to_u8_from_float(u8ary.data(), fary.data(), testsize);
 
-    delete[] u8ref;
-    delete[] u8ary;
-    delete[] i16ref;
-    delete[] i16ary;
-    delete[] i32ary;
-    delete[] fary;
-    delete[] pary;
+    EXPECT_EQ(0, memcmp(u8ary.data(), u8ref.data(), u8ary.size() * sizeof(u8ary[0])));
 }
 
 template<typename T>
@@ -657,35 +676,187 @@ TEST(audio_utils_primitives, memcpy_by_index_array_src_index) {
     delete[] u24ary;
 }
 
-TEST(audio_utils_channels, adjust_channels) {
-    uint16_t *u16ref = new uint16_t[65536];
-    uint16_t *u16expand = new uint16_t[65536*2];
-    uint16_t *u16ary = new uint16_t[65536];
+TEST(audio_utils_primitives, updown_mix) {
+    const size_t size = 32767;
+    std::vector<int16_t> i16ref(size * 2);
+    std::vector<int16_t> i16ary(size * 2);
+
+    for (size_t i = 0; i < size; ++i) {
+        i16ref[i] = i;
+    }
+    upmix_to_stereo_i16_from_mono_i16(i16ary.data(), i16ref.data(), size);
+    downmix_to_mono_i16_from_stereo_i16(i16ary.data(), i16ary.data(), size);
+
+    EXPECT_EQ(0, memcmp(i16ary.data(), i16ref.data(), sizeof(i16ref[0]) * size));
+}
+
+template<typename T, typename TComparison>
+void checkAddedClamped(T *out, const T *in1, const T *in2, size_t size,
+        TComparison limNeg, TComparison limPos)
+{
+    for (size_t i = 0; i < size; ++i) {
+        TComparison added = (TComparison)in1[i] + in2[i];
+        if (added <= limNeg) {
+            EXPECT_EQ(limNeg, out[i]);
+        } else if (added >= limPos) {
+            EXPECT_EQ(limPos, out[i]);
+        } else {
+            EXPECT_EQ(added, out[i]);
+        }
+    }
+}
+
+void checkAddedClampedp24(uint8_t *pary, const uint8_t *in1,
+        const uint8_t *in2, size_t size) {
+    // Convert to q8_23 for comparison.
+    int32_t *outi32ary = new int32_t[size];
+    int32_t *in1i32ary = new int32_t[size];
+    int32_t *in2i32ary = new int32_t[size];
+    memcpy_to_q8_23_from_p24(outi32ary, pary, size);
+    memcpy_to_q8_23_from_p24(in1i32ary, in1, size);
+    memcpy_to_q8_23_from_p24(in2i32ary, in2, size);
+    checkAddedClamped(
+            outi32ary, in1i32ary, in2i32ary, size, lim24neg, lim24pos);
+    delete[] in2i32ary;
+    delete[] in1i32ary;
+    delete[] outi32ary;
+}
+
+void checkAddedClampedu8(uint8_t *out, const uint8_t *in1,
+        const uint8_t *in2, size_t size) {
+    // uint8_t data is centered around 0x80, not 0, so checkAddedClamped
+    // won't work. Convert to i16 first.
+    int16_t *outi16ary = new int16_t[size];
+    int16_t *in1i16ary = new int16_t[size];
+    int16_t *in2i16ary = new int16_t[size];
+    memcpy_to_i16_from_u8(outi16ary, out, size);
+    memcpy_to_i16_from_u8(in1i16ary, in1, size);
+    memcpy_to_i16_from_u8(in2i16ary, in2, size);
+    // Only the higher order bits are used.
+    checkAddedClamped(outi16ary, in1i16ary, in2i16ary, size,
+            -0x8000, 0x7f00);
+    delete[] in2i16ary;
+    delete[] in1i16ary;
+    delete[] outi16ary;
+}
+
+TEST(audio_utils_primitives, accumulate) {
+    int16_t *i16ref = new int16_t[65536];
+    int16_t *i16add = new int16_t[65536];
+    int16_t *i16ary = new int16_t[65536];
 
-    // reference buffer always increases
     for (size_t i = 0; i < 65536; ++i) {
-        u16ref[i] = i;
+        i16ref[i] = i16ary[i] = i16add[(i+1) % 65536] = i - 32768;
     }
 
-    // expand channels from stereo to quad.
-    adjust_channels(u16ref /*in_buff*/, 2 /*in_channels*/,
-            u16expand /*out_buff*/, 4 /*out_channels*/,
-            sizeof(u16ref[0]) /*sample_size_in_bytes*/,
-            sizeof(u16ref[0])*65536 /*num_in_bytes*/);
+    // Test i16.
+    accumulate_i16(i16ary, i16add, 65536);
+    checkAddedClamped(i16ary, i16ref, i16add, 65536, lim16neg,
+            lim16pos);
 
-    // expanded buffer must increase (or be zero)
-    checkMonotoneOrZero(u16expand, 65536*2);
+    // Test i32.
+    int32_t *i32ary = new int32_t[65536];
+    int32_t *i32add = new int32_t[65536];
+    int32_t *i32ref = new int32_t[65536];
+    // Convert sample data to i32 to perform accumulate function.
+    memcpy_to_i32_from_i16(i32ary, i16ref, 65536);
+    memcpy_to_i32_from_i16(i32add, i16add, 65536);
+    // Ensure the reference matches the inital output after conversion.
+    memcpy(i32ref, i32ary, 65536 * sizeof(i32ary[0]));
+    // Accumulate and check.
+    accumulate_i32(i32ary, i32add, 65536);
+    checkAddedClamped(
+            i32ary, i32ref, i32add, 65536, lim32neg, lim32pos);
+    // Cleanup
+    delete[] i32ref;
+    delete[] i32add;
+    delete[] i32ary;
 
-    // contract channels back to stereo.
-    adjust_channels(u16expand /*in_buff*/, 4 /*in_channels*/,
-            u16ary /*out_buff*/, 2 /*out_channels*/,
-            sizeof(u16expand[0]) /*sample_size_in_bytes*/,
-            sizeof(u16expand[0])*65536*2 /*num_in_bytes*/);
+    // Test u8.
+    uint8_t *u8ary = new uint8_t[65536];
+    uint8_t *u8add = new uint8_t[65536];
+    uint8_t *u8ref = new uint8_t[65536];
+    // Convert sample data to u8 to perform accumulate function.
+    memcpy_to_u8_from_i16(u8ary, i16ref, 65536);
+    memcpy_to_u8_from_i16(u8add, i16add, 65536);
+    // Ensure the reference matches the inital output after conversion.
+    memcpy(u8ref, u8ary, 65536 * sizeof(u8ary[0]));
+    // Accumulate and check.
+    accumulate_u8(u8ary, u8add, 65536);
+    checkAddedClampedu8(u8ary, u8ref, u8add, 65536);
+    // Cleanup.
+    delete[] u8ref;
+    delete[] u8add;
+    delete[] u8ary;
 
-    // must be identical to original.
-    EXPECT_EQ(0, memcmp(u16ary, u16ref, sizeof(u16ref[0])*65536));
+    // Test 24 bit packed.
+    uint8_t *pary = new uint8_t[65536 * 3];
+    uint8_t *padd = new uint8_t[65536 * 3];
+    uint8_t *pref = new uint8_t[65536 * 3];
+    // Convert sample data to p24 to perform accumulate function.
+    memcpy_to_p24_from_i16(pary, i16ref, 65536);
+    memcpy_to_p24_from_i16(padd, i16add, 65536);
+    // Ensure the reference matches the inital output after conversion.
+    memcpy(pref, pary, 65536 * sizeof(pary[0]) * 3);
+    // Accumulate and check.
+    accumulate_p24(pary, padd, 65536);
+    checkAddedClampedp24(pary, pref, padd, 65536);
+    // Cleanup.
+    delete[] pref;
+    delete[] padd;
+    delete[] pary;
 
-    delete[] u16ref;
-    delete[] u16expand;
-    delete[] u16ary;
+    // Test 24 bit unpacked.
+    int32_t *q8_23ary = new int32_t[65536];
+    int32_t *q8_23add = new int32_t[65536];
+    int32_t *q8_23ref = new int32_t[65536];
+    // Convert sample data to q8_23 to perform accumulate function.
+    memcpy_to_q8_23_from_i16(q8_23ary, i16ref, 65536);
+    memcpy_to_q8_23_from_i16(q8_23add, i16add, 65536);
+    // Ensure the reference matches the inital output after conversion.
+    memcpy(q8_23ref, q8_23ary, 65536 * sizeof(q8_23ary[0]));
+    // Accumulate and check.
+    accumulate_q8_23(q8_23ary, q8_23add, 65536);
+    checkAddedClamped(
+            q8_23ary, q8_23ref, q8_23add, 65536, lim24neg, lim24pos);
+    // Cleanup.
+    delete[] q8_23ref;
+    delete[] q8_23add;
+    delete[] q8_23ary;
+
+    // Test float.
+    float *fary = new float[65536];
+    float *fadd = new float[65536];
+    float *fref = new float[65536];
+    // Convert sample data to float to perform accumulate function.
+    memcpy_to_float_from_i16(fary, i16ref, 65536);
+    memcpy_to_float_from_i16(fadd, i16add, 65536);
+    // Ensure the reference matches the inital output after conversion.
+    memcpy(fref, fary, 65536 * sizeof(fary[0]));
+    // Accumulate and check. Floats aren't clamped by accumulate,
+    // but given the input is in the [-1.0, 1.0) range output should be in
+    // [-2.0, 2.0) range.
+    accumulate_float(fary, fadd, 65536);
+    checkAddedClamped(fary, fref, fadd, 65536, -2.0f, 2.0f);
+    // Cleanup.
+    delete[] fref;
+    delete[] fadd;
+    delete[] fary;
+
+    delete[] i16ary;
+    delete[] i16add;
+    delete[] i16ref;
+}
+
+
+TEST(audio_utils_primitives, MemcpyToFloatFromFloatWithClamping) {
+    std::vector<float> src = {-INFINITY, -2, -1, -0, 0, 0.009, 1.000001, 9999999, INFINITY, NAN};
+    std::vector<float> dst(src.size());
+    float absMax = 1;
+    std::vector<float> expected = {-1, -1, -1, -0, 0, 0.009, 1, 1, 1, 1};
+    ASSERT_EQ(expected.size(), src.size());
+
+    memcpy_to_float_from_float_with_clamping(dst.data(), src.data(), src.size(), absMax);
+
+    ASSERT_EQ(dst, expected) << "src=" << testing::PrintToString(src);
 }
diff --git a/audio_utils/tests/string_tests.cpp b/audio_utils/tests/string_tests.cpp
new file mode 100644 (file)
index 0000000..2977624
--- /dev/null
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "audio_utils_string_tests"
+
+#include <audio_utils/string.h>
+#include <gtest/gtest.h>
+
+// fills the string buffer with a increasing ramp of values from start.
+template <size_t size>
+void fill(char (&s)[size], int start) {
+    for (size_t i = 0; i < size - 1; ++i) {
+        s[i] = start++;
+    }
+    s[size - 1] = 0;
+}
+
+// checks that the fill counts from start, as expected to actual chars,
+// whereupon the rest is expected to be zeroes.
+template <size_t size>
+void check(char (&s)[size], int start, size_t actual) {
+    size_t lim = std::min(actual, size);
+    size_t i = 0;
+
+    if (lim > 0) {
+        for (; i < lim - 1; ++i) {
+            EXPECT_EQ(start, s[i]);
+            ++start;
+        }
+    }
+    for (; i < size; ++i) {
+        EXPECT_EQ(0, s[i]);
+    }
+}
+
+TEST(audio_utils_string, check_zero_fill) {
+    // we use string arrays whose size is known by compiler, not vectors
+    constexpr size_t STRING_SIZE = 50;
+    union {
+        char dst[STRING_SIZE];
+        char dst_mirror[STRING_SIZE + 10]; // verifier that we don't overwrite
+    };
+    char over[sizeof(dst) + 5];
+    char under[sizeof(dst) - 5];
+
+    // fill with a value ramp
+    constexpr int DST_START = 1;
+    constexpr int OVER_START = 2;
+    constexpr int UNDER_START = 3;
+    fill(dst_mirror, DST_START);
+    fill(over, OVER_START);
+    fill(under, UNDER_START);
+
+    // union should overlay dst and dst_mirror.
+    dst[sizeof(dst) - 1] = 0;
+    check(dst, DST_START, sizeof(dst));
+    EXPECT_EQ(sizeof(dst) + DST_START, dst_mirror[sizeof(dst)]);
+
+    // make sure we truncate when copying a larger string.
+    audio_utils_strlcpy_zerofill(dst, over);
+    check(dst, OVER_START, sizeof(dst));
+
+    // check we didn't overwrite
+    EXPECT_EQ(sizeof(dst) + DST_START, dst_mirror[sizeof(dst)]);
+
+    // make sure we fill remaining buffer with zeros.
+    audio_utils_strlcpy_zerofill(dst, under);
+    check(dst, UNDER_START, sizeof(under));
+
+    // check we didn't overwrite
+    EXPECT_EQ(sizeof(dst) + DST_START, dst_mirror[sizeof(dst)]);
+}
diff --git a/brillo/audio/audioservice/Android.mk b/brillo/audio/audioservice/Android.mk
deleted file mode 100644 (file)
index ada9fe2..0000000
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright 2016 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-LOCAL_PATH := $(call my-dir)
-
-audio_service_shared_libraries := \
-  libbinder \
-  libbinderwrapper \
-  libbrillo \
-  libbrillo-binder \
-  libc \
-  libchrome \
-  libaudioclient \
-  libutils
-
-audio_client_sources := \
-  aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl \
-  aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl \
-  audio_service_callback.cpp \
-  brillo_audio_client.cpp \
-  brillo_audio_client_helpers.cpp \
-  brillo_audio_device_info.cpp \
-  brillo_audio_device_info_internal.cpp \
-  brillo_audio_manager.cpp
-
-audio_service_sources := \
-  aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl \
-  aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl \
-  audio_daemon.cpp \
-  audio_device_handler.cpp \
-  audio_volume_handler.cpp \
-  brillo_audio_service_impl.cpp
-
-# Audio service.
-# =============================================================================
-include $(CLEAR_VARS)
-LOCAL_MODULE := brilloaudioservice
-LOCAL_SRC_FILES := \
-  $(audio_service_sources) \
-  main_audio_service.cpp
-LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
-LOCAL_SHARED_LIBRARIES := $(audio_service_shared_libraries)
-LOCAL_CFLAGS := -Werror -Wall
-LOCAL_INIT_RC := brilloaudioserv.rc
-include $(BUILD_EXECUTABLE)
-
-# Audio client library.
-# =============================================================================
-include $(CLEAR_VARS)
-LOCAL_MODULE := libbrilloaudio
-LOCAL_SRC_FILES := \
-  $(audio_client_sources)
-LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
-LOCAL_SHARED_LIBRARIES := $(audio_service_shared_libraries)
-LOCAL_CFLAGS := -Wall -Werror -std=c++14
-include $(BUILD_SHARED_LIBRARY)
-
-# Unit tests for the Brillo audio service.
-# =============================================================================
-include $(CLEAR_VARS)
-LOCAL_MODULE := brilloaudioservice_test
-LOCAL_SRC_FILES := \
-  $(audio_service_sources) \
-  test/audio_daemon_test.cpp \
-  test/audio_device_handler_test.cpp \
-  test/audio_volume_handler_test.cpp
-LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
-LOCAL_SHARED_LIBRARIES := \
-  $(audio_service_shared_libraries)
-LOCAL_STATIC_LIBRARIES := \
-  libBionicGtestMain \
-  libbinderwrapper_test_support \
-  libchrome_test_helpers \
-  libgmock
-LOCAL_CFLAGS := -Werror -Wall
-LOCAL_CFLAGS += -Wno-sign-compare
-include $(BUILD_NATIVE_TEST)
-
-# Unit tests for the Brillo audio client.
-# =============================================================================
-include $(CLEAR_VARS)
-LOCAL_MODULE := brilloaudioclient_test
-LOCAL_SRC_FILES := \
-  $(audio_client_sources) \
-  test/audio_service_callback_test.cpp \
-  test/brillo_audio_client_test.cpp \
-  test/brillo_audio_device_info_internal_test.cpp \
-  test/brillo_audio_manager_test.cpp
-LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)/aidl
-LOCAL_SHARED_LIBRARIES := \
-  $(audio_service_shared_libraries)
-LOCAL_STATIC_LIBRARIES := \
-  libBionicGtestMain \
-  libbinderwrapper_test_support \
-  libchrome_test_helpers \
-  libgmock
-LOCAL_CFLAGS := -Wno-sign-compare -Wall -Werror
-include $(BUILD_NATIVE_TEST)
diff --git a/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl b/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IAudioServiceCallback.aidl
deleted file mode 100644 (file)
index 841c4ae..0000000
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package android.brillo.brilloaudioservice;
-
-/*
- * Interface for the callback object registered with IBrilloAudioService. Used
- * to notify clients about changes to the audio system.
- */
-interface IAudioServiceCallback {
-  // Oneway call triggered when audio devices are connected to the system.
-  oneway void OnAudioDevicesConnected(in int[] added_devices);
-
-  // Oneway call triggered when audio devices are disconnected from the system.
-  oneway void OnAudioDevicesDisconnected(in int[] removed_devices);
-
-  // Oneway call triggered when the volume is changed. If there are
-  // multiple active streams, this call will be called multiple times.
-  oneway void OnVolumeChanged(
-      int stream_type, int old_volume_index, int new_volume_index);
-}
diff --git a/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl b/brillo/audio/audioservice/aidl/android/brillo/brilloaudioservice/IBrilloAudioService.aidl
deleted file mode 100644 (file)
index 209b651..0000000
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package android.brillo.brilloaudioservice;
-
-import android.brillo.brilloaudioservice.IAudioServiceCallback;
-
-/*
- * Interface for BrilloAudioService that clients can use to get the list of
- * devices currently connected to the system as well as to control volume.
- * Clients can also register callbacks to be notified about changes.
- */
-interface IBrilloAudioService {
-  // Constants for device enumeration.
-  const int GET_DEVICES_INPUTS = 1;
-  const int GET_DEVICES_OUTPUTS = 2;
-
-  // Constants for volume control.
-  const int VOLUME_BUTTON_PRESS_DOWN = 1;
-  const int VOLUME_BUTTON_PRESS_UP = 2;
-
-  // Get the list of devices connected. If flag is GET_DEVICES_INPUTS, then
-  // return input devices. Otherwise, return output devices.
-  int[] GetDevices(int flag);
-
-  // Set device for a given usage.
-  // usage is an int of type audio_policy_force_use_t.
-  // config is an int of type audio_policy_forced_cfg_t.
-  void SetDevice(int usage, int config);
-
-  // Get the maximum number of steps used for a given stream.
-  int GetMaxVolumeSteps(int stream);
-
-  // Set the maximum number of steps to use for a given stream.
-  void SetMaxVolumeSteps(int stream, int max_steps);
-
-  // Set the volume for a given (stream, device) tuple.
-  void SetVolumeIndex(int stream, int device, int index);
-
-  // Get the current volume for a given (stream, device) tuple.
-  int GetVolumeIndex(int stream, int device);
-
-  // Get stream used when volume buttons are pressed.
-  int GetVolumeControlStream();
-
-  // Set default stream to use when volume buttons are pressed.
-  void SetVolumeControlStream(int stream);
-
-  // Increment volume.
-  void IncrementVolume();
-
-  // Decrement volume.
-  void DecrementVolume();
-
-  // Register a callback object with the service.
-  void RegisterServiceCallback(IAudioServiceCallback callback);
-
-  // Unregister a callback object.
-  void UnregisterServiceCallback(IAudioServiceCallback callback);
-}
diff --git a/brillo/audio/audioservice/audio_daemon.cpp b/brillo/audio/audioservice/audio_daemon.cpp
deleted file mode 100644 (file)
index 08ff548..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of audio_daemon.h.
-
-#include "audio_daemon.h"
-
-#include <sysexits.h>
-
-#include <base/bind.h>
-#include <base/files/file_enumerator.h>
-#include <base/files/file_path.h>
-#include <base/time/time.h>
-#include <binderwrapper/binder_wrapper.h>
-#include <linux/input.h>
-
-#include "brillo_audio_service_impl.h"
-
-namespace brillo {
-
-static const char kAPSServiceName[] = "media.audio_policy";
-static const char kInputDeviceDir[] = "/dev/input";
-static const char kServiceName[] =
-    "android.brillo.brilloaudioservice.BrilloAudioService";
-
-AudioDaemon::~AudioDaemon() {}
-
-void AudioDaemon::InitializeHandlers() {
-  // Start and initialize the audio daemon handlers.
-  audio_device_handler_ =
-      std::shared_ptr<AudioDeviceHandler>(new AudioDeviceHandler());
-  audio_volume_handler_ =
-      std::unique_ptr<AudioVolumeHandler>(new AudioVolumeHandler());
-
-  // Register a callback with the audio device handler to call when device state
-  // changes.
-  auto device_callback =
-      base::Bind(&AudioDaemon::DeviceCallback, weak_ptr_factory_.GetWeakPtr());
-  audio_device_handler_->RegisterDeviceCallback(device_callback);
-
-  // Register a callback with the audio volume handler.
-  auto volume_callback =
-      base::Bind(&AudioDaemon::VolumeCallback, weak_ptr_factory_.GetWeakPtr());
-  audio_volume_handler_->RegisterCallback(volume_callback);
-
-  audio_device_handler_->Init(aps_);
-  audio_volume_handler_->Init(aps_);
-
-  // Poll on all files in kInputDeviceDir.
-  base::FileEnumerator fenum(base::FilePath(kInputDeviceDir),
-                             false /*recursive*/, base::FileEnumerator::FILES);
-  for (base::FilePath name = fenum.Next(); !name.empty(); name = fenum.Next()) {
-    base::File file(name, base::File::FLAG_OPEN | base::File::FLAG_READ);
-    if (file.IsValid()) {
-      MessageLoop* message_loop = MessageLoop::current();
-      int fd = file.GetPlatformFile();
-      // Move file to files_ and ensure that when binding we get a pointer from
-      // the object in files_.
-      files_.emplace(std::move(file));
-      base::Closure file_callback =
-          base::Bind(&AudioDaemon::EventCallback, weak_ptr_factory_.GetWeakPtr(),
-                     &files_.top());
-      message_loop->WatchFileDescriptor(fd, MessageLoop::kWatchRead,
-                                        true /*persistent*/, file_callback);
-    } else {
-      LOG(WARNING) << "Could not open " << name.value() << " for reading. ("
-                   << base::File::ErrorToString(file.error_details()) << ")";
-    }
-  }
-
-  handlers_initialized_ = true;
-  // Once the handlers have been initialized, we can register with service
-  // manager.
-  InitializeBrilloAudioService();
-}
-
-void AudioDaemon::InitializeBrilloAudioService() {
-  brillo_audio_service_ = new BrilloAudioServiceImpl();
-  brillo_audio_service_->RegisterHandlers(
-      std::weak_ptr<AudioDeviceHandler>(audio_device_handler_),
-      std::weak_ptr<AudioVolumeHandler>(audio_volume_handler_));
-  android::BinderWrapper::Get()->RegisterService(kServiceName,
-                                                 brillo_audio_service_);
-  VLOG(1) << "Registered brilloaudioservice with the service manager.";
-}
-
-void AudioDaemon::ConnectToAPS() {
-  android::BinderWrapper* binder_wrapper = android::BinderWrapper::Get();
-  auto binder = binder_wrapper->GetService(kAPSServiceName);
-  // If we didn't get the audio policy service, try again in 500 ms.
-  if (!binder.get()) {
-    LOG(INFO) << "Could not connect to audio policy service. Trying again...";
-    brillo::MessageLoop::current()->PostDelayedTask(
-        base::Bind(&AudioDaemon::ConnectToAPS, weak_ptr_factory_.GetWeakPtr()),
-        base::TimeDelta::FromMilliseconds(500));
-    return;
-  }
-  LOG(INFO) << "Connected to audio policy service.";
-  binder_wrapper->RegisterForDeathNotifications(
-      binder,
-      base::Bind(&AudioDaemon::OnAPSDisconnected,
-                 weak_ptr_factory_.GetWeakPtr()));
-  VLOG(1) << "Registered death notification.";
-  aps_ = android::interface_cast<android::IAudioPolicyService>(binder);
-  if (!handlers_initialized_) {
-    InitializeHandlers();
-  } else {
-    audio_device_handler_->APSConnect(aps_);
-    audio_volume_handler_->APSConnect(aps_);
-  }
-}
-
-void AudioDaemon::OnAPSDisconnected() {
-  LOG(INFO) << "Audio policy service died. Will try to reconnect.";
-  audio_device_handler_->APSDisconnect();
-  audio_volume_handler_->APSDisconnect();
-  aps_ = nullptr;
-  ConnectToAPS();
-}
-
-// OnInit, we want to do the following:
-//   - Get a binder to the audio policy service.
-//   - Initialize the audio device and volume handlers.
-//   - Set up polling on files in /dev/input.
-int AudioDaemon::OnInit() {
-  int exit_code = Daemon::OnInit();
-  if (exit_code != EX_OK) return exit_code;
-  // Initialize a binder wrapper.
-  android::BinderWrapper::Create();
-  // Initialize a binder watcher.
-  binder_watcher_.Init();
-  ConnectToAPS();
-  return EX_OK;
-}
-
-void AudioDaemon::EventCallback(base::File* file) {
-  input_event event;
-  int bytes_read =
-      file->ReadAtCurrentPos(reinterpret_cast<char*>(&event), sizeof(event));
-  if (bytes_read != sizeof(event)) {
-    LOG(WARNING) << "Couldn't read an input event.";
-    return;
-  }
-  audio_device_handler_->ProcessEvent(event);
-  audio_volume_handler_->ProcessEvent(event);
-}
-
-void AudioDaemon::DeviceCallback(
-    AudioDeviceHandler::DeviceConnectionState state,
-    const std::vector<int>& devices) {
-  VLOG(1) << "Triggering device callback.";
-  if (!brillo_audio_service_.get()) {
-    LOG(ERROR) << "The Brillo audio service object is unavailble. Will try to "
-               << "call the clients again once the service is up.";
-    InitializeBrilloAudioService();
-    DeviceCallback(state, devices);
-    return;
-  }
-  if (state == AudioDeviceHandler::DeviceConnectionState::kDevicesConnected)
-    brillo_audio_service_->OnDevicesConnected(devices);
-  else
-    brillo_audio_service_->OnDevicesDisconnected(devices);
-}
-
-void AudioDaemon::VolumeCallback(audio_stream_type_t stream,
-                                 int previous_index,
-                                 int current_index) {
-  VLOG(1) << "Triggering volume button press callback.";
-  if (!brillo_audio_service_.get()) {
-    LOG(ERROR) << "The Brillo audio service object is unavailble. Will try to "
-               << "call the clients again once the service is up.";
-    InitializeBrilloAudioService();
-    VolumeCallback(stream, previous_index, current_index);
-    return;
-  }
-  brillo_audio_service_->OnVolumeChanged(stream, previous_index, current_index);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/audio_daemon.h b/brillo/audio/audioservice/audio_daemon.h
deleted file mode 100644 (file)
index 5fc01fd..0000000
+++ /dev/null
@@ -1,112 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Main loop of the brillo audio service.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
-
-#include <memory>
-#include <stack>
-#include <vector>
-
-#include <base/files/file.h>
-#include <base/memory/weak_ptr.h>
-#include <brillo/binder_watcher.h>
-#include <brillo/daemons/daemon.h>
-#include <media/IAudioPolicyService.h>
-
-#include "audio_device_handler.h"
-#include "audio_volume_handler.h"
-#include "brillo_audio_service.h"
-
-namespace brillo {
-
-class AudioDaemon : public Daemon {
- public:
-  AudioDaemon() {}
-  virtual ~AudioDaemon();
-
- protected:
-  // Initialize the audio daemon handlers and start pollig the files in
-  // /dev/input.
-  int OnInit() override;
-
- private:
-  friend class AudioDaemonTest;
-  FRIEND_TEST(AudioDaemonTest, RegisterService);
-  FRIEND_TEST(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce);
-  FRIEND_TEST(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL);
-
-  // Callback function for input events. Events are handled by the audio device
-  // handler.
-  void EventCallback(base::File* file);
-
-  // Callback function for device state changes. Events are handler by the
-  // audio service.
-  //
-  // |mode| is kDevicesConnected when |devices| are connected.
-  // |devices| is a vector of integers representing audio_devices_t.
-  void DeviceCallback(AudioDeviceHandler::DeviceConnectionState,
-                      const std::vector<int>& devices);
-
-  // Callback function when volume changes.
-  //
-  // |stream| is an audio_stream_type_t representing the stream.
-  // |previous_index| is the volume index before the key press.
-  // |current_index| is the volume index after the key press.
-  void VolumeCallback(audio_stream_type_t stream,
-                      int previous_index,
-                      int current_index);
-
-  // Callback function for audio policy service death notification.
-  void OnAPSDisconnected();
-
-  // Connect to the audio policy service and register a callback to be invoked
-  // if the audio policy service dies.
-  void ConnectToAPS();
-
-  // Register the brillo audio service with the service manager.
-  void InitializeBrilloAudioService();
-
-  // Initialize all audio daemon handlers.
-  //
-  // Note: This can only occur after we have connected to the audio policy
-  // service.
-  virtual void InitializeHandlers();
-
-  // Store the file objects that are created during initialization for the files
-  // being polled. This is done so these objects can be freed when the
-  // AudioDaemon object is destroyed.
-  std::stack<base::File> files_;
-  // Handler for audio device input events.
-  std::shared_ptr<AudioDeviceHandler> audio_device_handler_;
-  // Handler for volume key press input events.
-  std::shared_ptr<AudioVolumeHandler> audio_volume_handler_;
-  // Used to generate weak_ptr to AudioDaemon for use in base::Bind.
-  base::WeakPtrFactory<AudioDaemon> weak_ptr_factory_{this};
-  // Pointer to the audio policy service.
-  android::sp<android::IAudioPolicyService> aps_;
-  // Flag to indicate whether the handlers have been initialized.
-  bool handlers_initialized_ = false;
-  // Binder watcher to watch for binder messages.
-  BinderWatcher binder_watcher_;
-  // Brillo audio service. Used for scheduling callbacks to clients.
-  android::sp<BrilloAudioService> brillo_audio_service_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_H_
diff --git a/brillo/audio/audioservice/audio_daemon_handler.h b/brillo/audio/audioservice/audio_daemon_handler.h
deleted file mode 100644 (file)
index ea147c2..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Handler for input events in /dev/input. AudioDaemonHandler is the base class
-// that other handlers inherit.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
-
-#include <linux/input.h>
-#include <media/IAudioPolicyService.h>
-
-namespace brillo {
-
-class AudioDaemonHandler {
- public:
-  virtual ~AudioDaemonHandler(){};
-
-  // Initialize the handler.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void Init(android::sp<android::IAudioPolicyService> aps) = 0;
-
-  // Process input events from the kernel.
-  //
-  // |event| is a pointer to an input_event. This function should be able to
-  // gracefully handle input events that are not relevant to the functionality
-  // provided by this class.
-  virtual void ProcessEvent(const struct input_event& event) = 0;
-
-  // Inform the handler that the audio policy service has been disconnected.
-  virtual void APSDisconnect() = 0;
-
-  // Inform the handler that the audio policy service is reconnected.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void APSConnect(android::sp<android::IAudioPolicyService> aps) = 0;
-
- protected:
-  // Pointer to the audio policy service.
-  android::sp<android::IAudioPolicyService> aps_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DAEMON_HANDLER_H_
diff --git a/brillo/audio/audioservice/audio_device_handler.cpp b/brillo/audio/audioservice/audio_device_handler.cpp
deleted file mode 100644 (file)
index dc7e454..0000000
+++ /dev/null
@@ -1,233 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of audio_device_handler.h
-
-#include "audio_device_handler.h"
-
-#include <base/files/file.h>
-#include <base/logging.h>
-#include <brillo/message_loops/message_loop.h>
-#include <media/AudioSystem.h>
-
-namespace brillo {
-
-// All input devices currently supported by AudioDeviceHandler.
-const std::vector<audio_devices_t> AudioDeviceHandler::kSupportedInputDevices_ =
-    {AUDIO_DEVICE_IN_WIRED_HEADSET};
-
-const std::vector<audio_devices_t>
-    AudioDeviceHandler::kSupportedOutputDevices_ = {
-        AUDIO_DEVICE_OUT_WIRED_HEADSET, AUDIO_DEVICE_OUT_WIRED_HEADPHONE};
-
-static const char kH2WStateFile[] = "/sys/class/switch/h2w/state";
-
-AudioDeviceHandler::AudioDeviceHandler() {
-  headphone_ = false;
-  microphone_ = false;
-}
-
-AudioDeviceHandler::~AudioDeviceHandler() {}
-
-void AudioDeviceHandler::GetInputDevices(std::vector<int>* devices_list) {
-  std::copy(connected_input_devices_.begin(),
-            connected_input_devices_.end(),
-            std::back_inserter(*devices_list));
-}
-
-void AudioDeviceHandler::GetOutputDevices(std::vector<int>* devices_list) {
-  std::copy(connected_output_devices_.begin(),
-            connected_output_devices_.end(),
-            std::back_inserter(*devices_list));
-}
-
-void AudioDeviceHandler::RegisterDeviceCallback(
-      base::Callback<void(DeviceConnectionState,
-                          const std::vector<int>& )>& callback) {
-  callback_ = callback;
-}
-
-void AudioDeviceHandler::TriggerCallback(DeviceConnectionState state) {
-  // If no devices have changed, don't bother triggering a callback.
-  if (changed_devices_.size() == 0)
-    return;
-  base::Closure closure = base::Bind(callback_, state, changed_devices_);
-  MessageLoop::current()->PostTask(closure);
-  // We can clear changed_devices_ here since base::Bind makes a copy of
-  // changed_devices_.
-  changed_devices_.clear();
-}
-
-void AudioDeviceHandler::APSDisconnect() {
-  aps_.clear();
-}
-
-void AudioDeviceHandler::APSConnect(
-    android::sp<android::IAudioPolicyService> aps) {
-  aps_ = aps;
-  // Reset the state
-  connected_input_devices_.clear();
-  connected_output_devices_.clear();
-  // Inform audio policy service about the currently connected devices.
-  VLOG(1) << "Calling GetInitialAudioDeviceState on APSConnect.";
-  GetInitialAudioDeviceState(base::FilePath(kH2WStateFile));
-}
-
-void AudioDeviceHandler::Init(android::sp<android::IAudioPolicyService> aps) {
-  aps_ = aps;
-  // Reset audio policy service state in case this service crashed and there is
-  // a mismatch between the current system state and what audio policy service
-  // was previously told.
-  VLOG(1) << "Calling DisconnectAllSupportedDevices.";
-  DisconnectAllSupportedDevices();
-  TriggerCallback(kDevicesDisconnected);
-
-  // Get headphone jack state and update audio policy service with new state.
-  VLOG(1) << "Calling ReadInitialAudioDeviceState.";
-  GetInitialAudioDeviceState(base::FilePath(kH2WStateFile));
-}
-
-void AudioDeviceHandler::GetInitialAudioDeviceState(
-    const base::FilePath& path) {
-  base::File file(path, base::File::FLAG_OPEN | base::File::FLAG_READ);
-  if (!file.IsValid()) {
-    LOG(WARNING) << "Kernel does not have wired headset support. Could not "
-                 << "open " << path.value() << " ("
-                 << base::File::ErrorToString(file.error_details()) << ").";
-    return;
-  }
-  int state = 0;
-  int bytes_read = file.ReadAtCurrentPos(reinterpret_cast<char*>(&state), 1);
-  state -= '0';
-  if (bytes_read == 0) {
-    LOG(WARNING) << "Could not read from " << path.value();
-    return;
-  }
-  VLOG(1) << "Initial audio jack state is " << state;
-  static const int kHeadPhoneMask = 0x1;
-  bool headphone = state & kHeadPhoneMask;
-  static const int kMicrophoneMask = 0x2;
-  bool microphone = (state & kMicrophoneMask) >> 1;
-
-  UpdateAudioSystem(headphone, microphone);
-}
-
-void AudioDeviceHandler::NotifyAudioPolicyService(
-    audio_devices_t device, audio_policy_dev_state_t state) {
-  if (aps_ == nullptr) {
-    LOG(INFO) << "Audio device handler cannot call audio policy service. Will "
-              << "try again later.";
-    return;
-  }
-  VLOG(1) << "Calling Audio Policy Service to change " << device << " to state "
-          << state;
-  aps_->setDeviceConnectionState(device, state, "", "");
-}
-
-int AudioDeviceHandler::SetDevice(audio_policy_force_use_t usage,
-                                  audio_policy_forced_cfg_t config) {
-  if (aps_ == nullptr) {
-    LOG(WARNING) << "Audio policy service cannot be reached. Please try again.";
-    return EAGAIN;
-  }
-  VLOG(1) << "Calling audio policy service to set " << usage << " to "
-          << config;
-  return aps_->setForceUse(usage, config);
-}
-
-void AudioDeviceHandler::ConnectAudioDevice(audio_devices_t device) {
-  audio_policy_dev_state_t state = AUDIO_POLICY_DEVICE_STATE_AVAILABLE;
-  NotifyAudioPolicyService(device, state);
-  if (audio_is_input_device(device))
-    connected_input_devices_.insert(device);
-  else
-    connected_output_devices_.insert(device);
-  changed_devices_.push_back(device);
-}
-
-void AudioDeviceHandler::DisconnectAudioDevice(audio_devices_t device) {
-  audio_policy_dev_state_t state = AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE;
-  NotifyAudioPolicyService(device, state);
-  if (audio_is_input_device(device))
-    connected_input_devices_.erase(device);
-  else
-    connected_output_devices_.erase(device);
-  changed_devices_.push_back(device);
-}
-
-void AudioDeviceHandler::DisconnectAllSupportedDevices() {
-  for (auto device : kSupportedInputDevices_) {
-    DisconnectAudioDevice(device);
-  }
-  for (auto device : kSupportedOutputDevices_) {
-    DisconnectAudioDevice(device);
-  }
-}
-
-void AudioDeviceHandler::DisconnectAllConnectedDevices() {
-  while (!connected_input_devices_.empty()) {
-    audio_devices_t device = *(connected_input_devices_.begin());
-    DisconnectAudioDevice(device);
-  }
-  while (!connected_output_devices_.empty()) {
-    audio_devices_t device = *(connected_output_devices_.begin());
-    DisconnectAudioDevice(device);
-  }
-}
-
-void AudioDeviceHandler::UpdateAudioSystem(bool headphone, bool microphone) {
-  if (microphone) {
-    ConnectAudioDevice(AUDIO_DEVICE_IN_WIRED_HEADSET);
-  }
-  if (headphone && microphone) {
-    ConnectAudioDevice(AUDIO_DEVICE_OUT_WIRED_HEADSET);
-  } else if (headphone) {
-    ConnectAudioDevice(AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
-  } else if (!microphone) {
-    // No devices are connected. Inform the audio policy service that all
-    // connected devices have been disconnected.
-    DisconnectAllConnectedDevices();
-    TriggerCallback(kDevicesDisconnected);
-    return;
-  }
-  TriggerCallback(kDevicesConnected);
-  return;
-}
-
-void AudioDeviceHandler::ProcessEvent(const struct input_event& event) {
-  VLOG(1) << event.type << " " << event.code << " " << event.value;
-  if (event.type == EV_SW) {
-    switch (event.code) {
-      case SW_HEADPHONE_INSERT:
-        headphone_ = event.value;
-        break;
-      case SW_MICROPHONE_INSERT:
-        microphone_ = event.value;
-        break;
-      default:
-        // This event code is not supported by this handler.
-        break;
-    }
-  } else if (event.type == EV_SYN) {
-    // We have received all input events. Update the audio system.
-    UpdateAudioSystem(headphone_, microphone_);
-    // Reset the headphone and microphone flags that are used to track
-    // information across multiple calls to ProcessEvent.
-    headphone_ = false;
-    microphone_ = false;
-  }
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/audio_device_handler.h b/brillo/audio/audioservice/audio_device_handler.h
deleted file mode 100644 (file)
index af20420..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Handler for input events in /dev/input. AudioDeviceHandler handles events
-// only for audio devices being plugged in/removed from the system. Implements
-// some of the functionality present in WiredAccessoryManager.java.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
-
-#include <set>
-#include <vector>
-
-#include <base/bind.h>
-#include <base/files/file_path.h>
-#include <gtest/gtest_prod.h>
-#include <linux/input.h>
-#include <media/IAudioPolicyService.h>
-#include <system/audio.h>
-#include <system/audio_policy.h>
-
-#include "audio_daemon_handler.h"
-
-namespace brillo {
-
-class AudioDeviceHandler : public AudioDaemonHandler {
- public:
-  AudioDeviceHandler();
-  virtual ~AudioDeviceHandler();
-
-  // Get the current state of the headset jack and update AudioSystem based on
-  // the initial state.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void Init(android::sp<android::IAudioPolicyService> aps) override;
-
-  // Process input events from the kernel. Connecting/disconnecting an audio
-  // device will result in multiple calls to this method.
-  //
-  // |event| is a pointer to an input_event. This function should be able to
-  // gracefully handle input events that are not relevant to the functionality
-  // provided by this class.
-  virtual void ProcessEvent(const struct input_event& event) override;
-
-  // Inform the handler that the audio policy service has been disconnected.
-  void APSDisconnect();
-
-  // Inform the handler that the audio policy service is reconnected.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void APSConnect(
-      android::sp<android::IAudioPolicyService> aps) override;
-
-  // Get the list of connected devices.
-  //
-  // |devices_list| is the vector to copy list of connected input devices to.
-  void GetInputDevices(std::vector<int>* devices_list);
-
-  // Get the list of connected output devices.
-  //
-  // |devices_list| is the vector to copy the list of connected output devices
-  // to.
-  void GetOutputDevices(std::vector<int>* devices_list);
-
-  // Set device.
-  //
-  // |usage| is an int of type audio_policy_force_use_t
-  // |config| is an int of type audio_policy_forced_cfg_t.
-  //
-  // Returns 0 on sucess and errno on failure.
-  int SetDevice(audio_policy_force_use_t usage,
-                audio_policy_forced_cfg_t config);
-
-  // Enum used to represent whether devices are being connected or not. This is
-  // used when triggering callbacks.
-  enum DeviceConnectionState {
-    kDevicesConnected,
-    kDevicesDisconnected
-  };
-
-  // Register a callback function to call when device state changes.
-  //
-  // |callback| is an object of type base::Callback that accepts a
-  // DeviceConnectionState and a vector of ints. See DeviceCallback() in
-  // audio_daemon.h.
-  void RegisterDeviceCallback(
-      base::Callback<void(DeviceConnectionState,
-                          const std::vector<int>& )>& callback);
-
- private:
-  friend class AudioDeviceHandlerTest;
-  friend class AudioVolumeHandler;
-  friend class AudioVolumeHandlerTest;
-  FRIEND_TEST(AudioDeviceHandlerTest,
-              DisconnectAllSupportedDevicesCallsDisconnect);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateNone);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateInvalid);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventEmpty);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophonePresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphonePresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventInvalid);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemNone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceInput);
-  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceOutput);
-  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceInput);
-  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput);
-  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
-
-  // Read the initial state of audio devices in /sys/class/* and update
-  // the audio policy service.
-  //
-  // |path| is the file that contains the initial audio jack state.
-  void GetInitialAudioDeviceState(const base::FilePath& path);
-
-  // Update the audio policy service once an input_event has completed.
-  //
-  // |headphone| is true is headphones are connected.
-  // |microphone| is true is microphones are connected.
-  void UpdateAudioSystem(bool headphone, bool microphone);
-
-  // Notify the audio policy service that this device has been removed.
-  //
-  // |device| is the audio device whose state is to be changed.
-  // |state| is the current state of |device|.
-  virtual void NotifyAudioPolicyService(audio_devices_t device,
-                                        audio_policy_dev_state_t state);
-
-  // Connect an audio device by calling aps and add it to the appropriate set
-  // (either connected_input_devices_ or connected_output_devices_).
-  //
-  // |device| is the audio device that has been added.
-  void ConnectAudioDevice(audio_devices_t device);
-
-  // Disconnect an audio device by calling aps and remove it from the
-  // appropriate set (either connected_input_devices_ or
-  // connected_output_devices_).
-  //
-  // |device| is the audio device that has been disconnected.
-  void DisconnectAudioDevice(audio_devices_t device);
-
-  // Disconnected all connected audio devices.
-  void DisconnectAllConnectedDevices();
-
-  // Disconnect all supported audio devices.
-  void DisconnectAllSupportedDevices();
-
-  // Trigger a callback when a device is either connected or disconnected.
-  //
-  // |state| is kDevicesConnected when |devices| are being connected.
-  virtual void TriggerCallback(DeviceConnectionState state);
-
-  // All input devices currently supported by AudioDeviceHandler.
-  static const std::vector<audio_devices_t> kSupportedInputDevices_;
-  // All output devices currently supported by AudioDeviceHandler.
-  static const std::vector<audio_devices_t> kSupportedOutputDevices_;
-
- protected:
-  // Set of connected input devices.
-  std::set<audio_devices_t> connected_input_devices_;
-  // Set of connected output devices.
-  std::set<audio_devices_t> connected_output_devices_;
-  // Vector of devices changed (used for callbacks to clients).
-  std::vector<int> changed_devices_;
-  // Keeps track of whether a headphone has been connected. Used by ProcessEvent
-  // and UpdateAudioSystem.
-  bool headphone_;
-  // Keeps track of whether a microphone has been connected. Used by
-  // ProcessEvent and UpdateAudioSystem.
-  bool microphone_;
-  // Callback object to call when device state changes.
-  base::Callback<void(DeviceConnectionState,
-                      const std::vector<int>& )> callback_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_DEVICE_HANDLER_H_
diff --git a/brillo/audio/audioservice/audio_service_callback.cpp b/brillo/audio/audioservice/audio_service_callback.cpp
deleted file mode 100644 (file)
index 3baee23..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of audio_service_callback.
-
-#include "audio_service_callback.h"
-
-#include <base/bind.h>
-#include <base/logging.h>
-
-#include "brillo_audio_client_helpers.h"
-#include "brillo_audio_device_info_def.h"
-
-using android::binder::Status;
-
-namespace brillo {
-
-AudioServiceCallback::AudioServiceCallback(const BAudioCallback* callback,
-                                           void* user_data) {
-  connected_callback_ = base::Bind(callback->OnAudioDeviceAdded);
-  disconnected_callback_ = base::Bind(callback->OnAudioDeviceRemoved);
-  volume_callback_ = base::Bind(callback->OnVolumeChanged);
-  user_data_ = user_data;
-}
-
-Status AudioServiceCallback::OnAudioDevicesConnected(
-    const std::vector<int>& devices) {
-  for (auto device : devices) {
-    BAudioDeviceInfo device_info;
-    device_info.internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
-        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(device));
-    connected_callback_.Run(&device_info, user_data_);
-  }
-  return Status::ok();
-}
-
-Status AudioServiceCallback::OnAudioDevicesDisconnected(
-    const std::vector<int>& devices) {
-  for (auto device : devices) {
-    BAudioDeviceInfo device_info;
-    device_info.internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
-        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(device));
-    disconnected_callback_.Run(&device_info, user_data_);
-  }
-  return Status::ok();
-}
-
-Status AudioServiceCallback::OnVolumeChanged(int stream,
-                                             int previous_index,
-                                             int current_index) {
-  auto usage = BrilloAudioClientHelpers::GetBAudioUsage(
-      static_cast<audio_stream_type_t>(stream));
-  volume_callback_.Run(usage, previous_index, current_index, user_data_);
-  return Status::ok();
-}
-
-bool AudioServiceCallback::Equals(const android::sp<AudioServiceCallback>& callback) {
-  if (callback->connected_callback_.Equals(connected_callback_) &&
-      callback->disconnected_callback_.Equals(disconnected_callback_) &&
-      callback->volume_callback_.Equals(volume_callback_) &&
-      callback->user_data_ == user_data_)
-    return true;
-  return false;
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/audio_service_callback.h b/brillo/audio/audioservice/audio_service_callback.h
deleted file mode 100644 (file)
index 3a5a289..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Callback object to be passed to brilloaudioservice.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
-
-#include <vector>
-
-#include <base/callback.h>
-#include <binder/Status.h>
-
-#include "android/brillo/brilloaudioservice/BnAudioServiceCallback.h"
-#include "include/brillo_audio_manager.h"
-
-using android::binder::Status;
-using android::brillo::brilloaudioservice::BnAudioServiceCallback;
-
-namespace brillo {
-
-class AudioServiceCallback : public BnAudioServiceCallback {
- public:
-  // Constructor for AudioServiceCallback.
-  //
-  // |callback| is an object of type BAudioCallback.
-  // |user_data| is an object to be passed to the callbacks.
-  AudioServiceCallback(const BAudioCallback* callback, void* user_data);
-
-  // Callback function triggered when a device is connected.
-  //
-  // |devices| is a vector of audio_devices_t.
-  Status OnAudioDevicesConnected(const std::vector<int>& devices);
-
-  // Callback function triggered when a device is disconnected.
-  //
-  // |devices| is a vector of audio_devices_t.
-  Status OnAudioDevicesDisconnected(const std::vector<int>& devices);
-
-  // Callback function triggered when volume is changed.
-  //
-  // |stream| is an int representing the stream.
-  // |previous_index| is the volume index before the key press.
-  // |current_index| is the volume index after the key press.
-  Status OnVolumeChanged(int stream, int previous_index, int current_index);
-
-  // Method to compare two AudioServiceCallback objects.
-  //
-  // |callback| is a ref counted pointer to a AudioServiceCallback object to be
-  // compared with this.
-  //
-  // Returns true if |callback| equals this.
-  bool Equals(const android::sp<AudioServiceCallback>& callback);
-
- private:
-  // Callback when devices are connected.
-  base::Callback<void(const BAudioDeviceInfo*, void*)> connected_callback_;
-  // Callback when devices are disconnected.
-  base::Callback<void(const BAudioDeviceInfo*, void*)> disconnected_callback_;
-  // Callback when the volume button is pressed.
-  base::Callback<void(BAudioUsage, int, int, void*)> volume_callback_;
-  // User data passed to the callbacks.
-  void* user_data_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_SERVICE_CALLBACK_H_
diff --git a/brillo/audio/audioservice/audio_volume_handler.cpp b/brillo/audio/audioservice/audio_volume_handler.cpp
deleted file mode 100644 (file)
index d95b2c2..0000000
+++ /dev/null
@@ -1,236 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of audio_volume_handler.h
-
-#include "audio_volume_handler.h"
-
-#include <base/files/file.h>
-#include <base/files/file_util.h>
-#include <base/logging.h>
-#include <brillo/map_utils.h>
-#include <brillo/message_loops/message_loop.h>
-#include <brillo/strings/string_utils.h>
-
-#include "audio_device_handler.h"
-
-namespace brillo {
-
-static const char kVolumeStateFilePath[] =
-    "/data/misc/brilloaudioservice/volume.dat";
-
-AudioVolumeHandler::AudioVolumeHandler() {
-  for (auto stream : kSupportedStreams_) {
-    step_sizes_.emplace(stream, kDefaultStepSize_);
-  }
-  selected_stream_ = AUDIO_STREAM_DEFAULT;
-  volume_state_file_ = base::FilePath(kVolumeStateFilePath);
-}
-
-AudioVolumeHandler::~AudioVolumeHandler() {}
-
-void AudioVolumeHandler::APSDisconnect() { aps_.clear(); }
-
-void AudioVolumeHandler::APSConnect(
-    android::sp<android::IAudioPolicyService> aps) {
-  aps_ = aps;
-  InitAPSAllStreams();
-}
-
-void AudioVolumeHandler::RegisterCallback(
-    base::Callback<void(audio_stream_type_t, int, int)>& callback) {
-  callback_ = callback;
-}
-
-int AudioVolumeHandler::ConvertToUserDefinedIndex(audio_stream_type_t stream,
-                                                  int index) {
-  return index / step_sizes_[stream];
-}
-
-int AudioVolumeHandler::ConvertToInternalIndex(audio_stream_type_t stream,
-                                               int index) {
-  return index * step_sizes_[stream];
-}
-
-void AudioVolumeHandler::TriggerCallback(audio_stream_type_t stream,
-                                         int previous_index,
-                                         int current_index) {
-  int user_defined_previous_index =
-      ConvertToUserDefinedIndex(stream, previous_index);
-  int user_defined_current_index =
-      ConvertToUserDefinedIndex(stream, current_index);
-  MessageLoop::current()->PostTask(base::Bind(callback_,
-                                              stream,
-                                              user_defined_previous_index,
-                                              user_defined_current_index));
-}
-
-void AudioVolumeHandler::GenerateVolumeFile() {
-  for (auto stream : kSupportedStreams_) {
-    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
-      PersistVolumeConfiguration(stream, device, kDefaultCurrentIndex_);
-    }
-  }
-  if (!kv_store_->Save(volume_state_file_)) {
-    LOG(ERROR) << "Could not save volume data file!";
-  }
-}
-
-int AudioVolumeHandler::GetVolumeMaxSteps(audio_stream_type_t stream) {
-  return ConvertToUserDefinedIndex(stream, kMaxIndex_);
-}
-
-int AudioVolumeHandler::SetVolumeMaxSteps(audio_stream_type_t stream,
-                                          int max_steps) {
-  if (max_steps <= kMinIndex_ || max_steps > kMaxIndex_)
-    return EINVAL;
-  step_sizes_[stream] = kMaxIndex_ / max_steps;
-  return 0;
-}
-
-int AudioVolumeHandler::GetVolumeCurrentIndex(audio_stream_type_t stream,
-                                              audio_devices_t device) {
-  auto key = kCurrentIndexKey_ + "." + string_utils::ToString(stream) + "." +
-             string_utils::ToString(device);
-  std::string value;
-  kv_store_->GetString(key, &value);
-  return std::stoi(value);
-}
-
-int AudioVolumeHandler::GetVolumeIndex(audio_stream_type_t stream,
-                                       audio_devices_t device) {
-  return ConvertToUserDefinedIndex(stream,
-                                   GetVolumeCurrentIndex(stream, device));
-}
-
-int AudioVolumeHandler::SetVolumeIndex(audio_stream_type_t stream,
-                                       audio_devices_t device,
-                                       int index) {
-  if (index < kMinIndex_ ||
-      index > ConvertToUserDefinedIndex(stream, kMaxIndex_))
-    return EINVAL;
-  int previous_index = GetVolumeCurrentIndex(stream, device);
-  int current_absolute_index = ConvertToInternalIndex(stream, index);
-  PersistVolumeConfiguration(stream, device, current_absolute_index);
-  TriggerCallback(stream, previous_index, current_absolute_index);
-  return 0;
-}
-
-void AudioVolumeHandler::PersistVolumeConfiguration(audio_stream_type_t stream,
-                                                    audio_devices_t device,
-                                                    int index) {
-  auto key = kCurrentIndexKey_ + "." + string_utils::ToString(stream) + "." +
-             string_utils::ToString(device);
-  kv_store_->SetString(key, string_utils::ToString(index));
-  kv_store_->Save(volume_state_file_);
-}
-
-void AudioVolumeHandler::InitAPSAllStreams() {
-  for (auto stream : kSupportedStreams_) {
-    aps_->initStreamVolume(stream, kMinIndex_, kMaxIndex_);
-    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
-      int current_index = GetVolumeCurrentIndex(stream, device);
-      aps_->setStreamVolumeIndex(stream, current_index, device);
-    }
-  }
-}
-
-void AudioVolumeHandler::SetVolumeFilePathForTesting(
-    const base::FilePath& path) {
-  volume_state_file_ = path;
-}
-
-void AudioVolumeHandler::Init(android::sp<android::IAudioPolicyService> aps) {
-  aps_ = aps;
-  kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore());
-  if (!base::PathExists(volume_state_file_)) {
-    // Generate key-value store and save it to a file.
-    GenerateVolumeFile();
-  } else {
-    // Load the file. If loading fails, generate the file.
-    if (!kv_store_->Load(volume_state_file_)) {
-      LOG(ERROR) << "Could not load volume data file!";
-      GenerateVolumeFile();
-    }
-  }
-  // Inform APS.
-  InitAPSAllStreams();
-}
-
-audio_stream_type_t AudioVolumeHandler::GetVolumeControlStream() {
-  return selected_stream_;
-}
-
-void AudioVolumeHandler::SetVolumeControlStream(audio_stream_type_t stream) {
-  selected_stream_ = stream;
-}
-
-int AudioVolumeHandler::GetNewVolumeIndex(int previous_index, int direction,
-                                          audio_stream_type_t stream) {
-  int current_index =
-      previous_index + ConvertToInternalIndex(stream, direction);
-  if (current_index < kMinIndex_) {
-    return kMinIndex_;
-  } else if (current_index > kMaxIndex_) {
-    return kMaxIndex_;
-  } else
-    return current_index;
-}
-
-void AudioVolumeHandler::AdjustStreamVolume(audio_stream_type_t stream,
-                                            int direction) {
-  VLOG(1) << "Adjusting volume of stream " << selected_stream_
-          << " in direction " << direction;
-  auto device = aps_->getDevicesForStream(stream);
-  int previous_index = GetVolumeCurrentIndex(stream, device);
-  int current_index = GetNewVolumeIndex(previous_index, direction, stream);
-  VLOG(1) << "Current index is " << current_index << " for stream " << stream
-          << " and device " << device;
-  aps_->setStreamVolumeIndex(stream, current_index, device);
-  PersistVolumeConfiguration(selected_stream_, device, current_index);
-  TriggerCallback(stream, previous_index, current_index);
-}
-
-void AudioVolumeHandler::AdjustVolumeActiveStreams(int direction) {
-  if (selected_stream_ != AUDIO_STREAM_DEFAULT) {
-    AdjustStreamVolume(selected_stream_, direction);
-    return;
-  }
-  for (auto stream : kSupportedStreams_) {
-    if (aps_->isStreamActive(stream)) {
-      AdjustStreamVolume(stream, direction);
-      return;
-    }
-  }
-}
-
-void AudioVolumeHandler::ProcessEvent(const struct input_event& event) {
-  VLOG(1) << event.type << " " << event.code << " " << event.value;
-  if (event.type == EV_KEY) {
-    switch (event.code) {
-      case KEY_VOLUMEDOWN:
-        AdjustVolumeActiveStreams(-1);
-        break;
-      case KEY_VOLUMEUP:
-        AdjustVolumeActiveStreams(1);
-        break;
-      default:
-        // This event code is not supported by this handler.
-        break;
-    }
-  }
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/audio_volume_handler.h b/brillo/audio/audioservice/audio_volume_handler.h
deleted file mode 100644 (file)
index fb95c2f..0000000
+++ /dev/null
@@ -1,248 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Handler for input events in /dev/input. AudioVolumeHandler handles events
-// only for volume key presses.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
-
-#include <base/bind.h>
-#include <base/files/file_path.h>
-#include <brillo/key_value_store.h>
-#include <gtest/gtest_prod.h>
-#include <linux/input.h>
-#include <media/IAudioPolicyService.h>
-#include <system/audio.h>
-
-#include "audio_daemon_handler.h"
-
-namespace brillo {
-
-class AudioVolumeHandler : public AudioDaemonHandler {
- public:
-  AudioVolumeHandler();
-  virtual ~AudioVolumeHandler();
-
-  // Get the current state of the headset jack and update AudioSystem based on
-  // the initial state.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void Init(android::sp<android::IAudioPolicyService> aps) override;
-
-  // Process input events from the kernel. Connecting/disconnecting an audio
-  // device will result in multiple calls to this method.
-  //
-  // |event| is a pointer to an input_event. This function should be able to
-  // gracefully handle input events that are not relevant to the functionality
-  // provided by this class.
-  virtual void ProcessEvent(const struct input_event& event) override;
-
-  // Inform the handler that the audio policy service has been disconnected.
-  virtual void APSDisconnect() override;
-
-  // Inform the handler that the audio policy service is reconnected.
-  //
-  // |aps| is a pointer to the binder object.
-  virtual void APSConnect(
-      android::sp<android::IAudioPolicyService> aps) override;
-
-  // Get the stream used when volume buttons are pressed.
-  //
-  // Returns an audio_stream_t representing the stream. If
-  // SetVolumeControlStream isn't called before calling this method,
-  // STREAM_DEFAULT is returned.
-  audio_stream_type_t GetVolumeControlStream();
-
-  // Set the stream to use when volume buttons are pressed.
-  //
-  // |stream| is an int representing the stream. Passing STREAM_DEFAULT to this
-  // method can be used to reset selected_stream_.
-  void SetVolumeControlStream(audio_stream_type_t stream);
-
-  // Register a callback to be triggered when keys are pressed.
-  //
-  // |callback| is an object of type base::Callback.
-  void RegisterCallback(
-      base::Callback<void(audio_stream_type_t, int, int)>& callback);
-
-  // Set the max steps for an audio stream.
-  //
-  // |stream| is an int representing the stream.
-  // |max_index| is an int representing the maximum index to set for |stream|.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetVolumeMaxSteps(audio_stream_type_t stream, int max_steps);
-
-  // Get the max steps for an audio stream.
-  //
-  // |stream| is an int representing the stream.
-  //
-  // Returns the maximum possible index for |stream|.
-  int GetVolumeMaxSteps(audio_stream_type_t stream);
-
-  // Get the volume of a given key.
-  //
-  // |stream| is an int representing the stream.
-  // |device| is an int representing the device.
-  //
-  // Returns an int which corresponds to the current index.
-  int GetVolumeCurrentIndex(audio_stream_type_t stream, audio_devices_t device);
-
-  // Set the volume for a given (stream, device) tuple.
-  //
-  // |stream| is an int representing the stream.
-  // |device| is an int representing the device.
-  // |index| is an int representing the volume.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetVolumeIndex(audio_stream_type_t stream,
-                     audio_devices_t device,
-                     int index);
-
-  // Get the volume for a given (stream, device) tuple.
-  //
-  // |stream| is an int representing the stream.
-  // |device| is an int representing the device.
-  //
-  // Returns the index for the (stream, device) tuple. This index is between 0
-  // and the user defined maximum value.
-  int GetVolumeIndex(audio_stream_type_t stream, audio_devices_t device);
-
-  // Update the volume index for a given stream.
-  //
-  // |previous_index| is the current index of the stream/device tuple before the
-  // volume button is pressed.
-  // |direction| is an int which is multiplied to step_. +1 for volume up and -1
-  // for volume down.
-  // |stream| is an int representing the stream.
-  //
-  // Returns the new volume index.
-  int GetNewVolumeIndex(int previous_index, int direction,
-                        audio_stream_type_t stream);
-
-  // Adjust the volume of the active streams in the direction indicated. If
-  // SetDefaultStream() is called, then only the volume for that stream will be
-  // changed. Calling this method always triggers a callback.
-  //
-  // |direction| is an int which is multiplied to step_. +1 for volume up and -1
-  // for volume down.
-  virtual void AdjustVolumeActiveStreams(int direction);
-
- private:
-  friend class AudioVolumeHandlerTest;
-  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
-  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple);
-  FRIEND_TEST(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple);
-  FRIEND_TEST(AudioVolumeHandlerTest, InitNoFile);
-  FRIEND_TEST(AudioVolumeHandlerTest, InitFilePresent);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventEmpty);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyUp);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyDown);
-  FRIEND_TEST(AudioVolumeHandlerTest, SelectStream);
-  FRIEND_TEST(AudioVolumeHandlerTest, ComputeNewVolume);
-  FRIEND_TEST(AudioVolumeHandlerTest, GetSetVolumeIndex);
-
-  // Save the volume for a given (stream, device) tuple.
-  //
-  // |stream| is an int representing the stream.
-  // |device| is an int representing the device.
-  // |index| is an int representing the volume.
-  void PersistVolumeConfiguration(audio_stream_type_t stream,
-                                  audio_devices_t device,
-                                  int index);
-
-  // Read the initial volume of audio streams.
-  //
-  // |path| is the file that contains the initial volume state.
-  void GetInitialVolumeState(const base::FilePath& path);
-
-  // Adjust the volume of a given stream in the direction specified.
-  //
-  // |stream| is an int representing the stream.
-  // |direction| is an int which is multiplied to step_. +1 for volume up and -1
-  // for volume down.
-  void AdjustStreamVolume(audio_stream_type_t stream, int direction);
-
-  // Set the file path for testing.
-  //
-  // |path| to use while running tests.
-  void SetVolumeFilePathForTesting(const base::FilePath& path);
-
-  // Initialize all the streams in the audio policy service.
-  virtual void InitAPSAllStreams();
-
-  // Generate the volume config file.
-  void GenerateVolumeFile();
-
-  // Trigger a callback when a volume button is pressed.
-  //
-  // |stream| is an audio_stream_t representing the stream.
-  // |previous_index| is the volume index before the key press. This is an
-  // absolute index from 0 - 100.
-  // |current_index| is the volume index after the key press. This is an
-  // absolute index from 0 - 100.
-  virtual void TriggerCallback(audio_stream_type_t stream,
-                               int previous_index,
-                               int current_index);
-
-  // Convert internal index to user defined index scale.
-  //
-  // |stream| is an audio_stream_t representing the stream.
-  // |index| is the volume index before the key press. This is an absolute
-  // index from 0 - 100.
-  //
-  // Returns an int between 0 and the user defined max.
-  int ConvertToUserDefinedIndex(audio_stream_type_t stream, int index);
-
-  // Convert user defined index to internal index scale.
-  //
-  // |stream| is an audio_stream_t representing the stream.
-  // |index| is the volume index before the key press. This is an index from 0
-  // and the user defined max.
-  //
-  // Returns an int between 0 and 100.
-  int ConvertToInternalIndex(audio_stream_type_t stream, int index);
-
-  // Stream to use for volume control.
-  audio_stream_type_t selected_stream_;
-  // File backed key-value store of the current index (as seen by the audio
-  // policy service).
-  std::unique_ptr<KeyValueStore> kv_store_;
-  // Supported stream names. The order of this vector defines the priority from
-  // high to low.
-  std::vector<audio_stream_type_t> kSupportedStreams_{
-      AUDIO_STREAM_ALARM, AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_SYSTEM,
-      AUDIO_STREAM_MUSIC};
-  // Step size for each stream. This is used to translate between user defined
-  // stream ranges and the range as seen by audio policy service. This value is
-  // not file-backed and is intended to be re-applied by the user on reboots and
-  // brilloaudioservice crashes.
-  std::map<audio_stream_type_t, double> step_sizes_;
-  // Callback to call when volume buttons are pressed.
-  base::Callback<void(audio_stream_type_t, int, int)> callback_;
-  // Key indicies.
-  const std::string kCurrentIndexKey_ = "current_index";
-  // Default values.
-  const int kMinIndex_ = 0;
-  const int kDefaultCurrentIndex_ = 30;
-  const int kMaxIndex_ = 100;
-  const int kDefaultStepSize_ = 1;
-  base::FilePath volume_state_file_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_AUDIO_VOLUME_HANDLER_H_
diff --git a/brillo/audio/audioservice/brillo_audio_client.cpp b/brillo/audio/audioservice/brillo_audio_client.cpp
deleted file mode 100644 (file)
index f347c56..0000000
+++ /dev/null
@@ -1,224 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of brillo_audio_client.h
-
-#include "brillo_audio_client.h"
-
-#include <base/logging.h>
-#include <binder/Status.h>
-#include <binderwrapper/binder_wrapper.h>
-
-#include "brillo_audio_client_helpers.h"
-#include "brillo_audio_device_info_def.h"
-#include "brillo_audio_device_info_internal.h"
-
-using android::binder::Status;
-
-namespace brillo {
-
-static const char kBrilloAudioServiceName[] =
-    "android.brillo.brilloaudioservice.BrilloAudioService";
-
-std::shared_ptr<BrilloAudioClient> BrilloAudioClient::instance_ = nullptr;
-
-int BrilloAudioClient::callback_id_counter_ = 1;
-
-BrilloAudioClient::~BrilloAudioClient() {}
-
-std::weak_ptr<BrilloAudioClient> BrilloAudioClient::GetClientInstance() {
-  if (!instance_) {
-    instance_ = std::shared_ptr<BrilloAudioClient>(new BrilloAudioClient());
-    if (!instance_->Initialize()) {
-      LOG(ERROR) << "Could not Initialize the brillo audio client.";
-      instance_.reset();
-      return instance_;
-    }
-  }
-  return instance_;
-}
-
-android::sp<android::IBinder> BrilloAudioClient::ConnectToService(
-    const std::string& service_name, const base::Closure& callback) {
-  android::BinderWrapper* binder_wrapper =
-      android::BinderWrapper::GetOrCreateInstance();
-  auto service = binder_wrapper->GetService(service_name);
-  if (!service.get()) {
-    return service;
-  }
-  binder_wrapper->RegisterForDeathNotifications(service, callback);
-  return service;
-}
-
-void BrilloAudioClient::OnBASDisconnect() {
-  LOG(WARNING) << "The brillo audio service died! Please reset the "
-               << "BAudioManager.";
-  instance_.reset();
-}
-
-bool BrilloAudioClient::Initialize() {
-  auto service = ConnectToService(
-      kBrilloAudioServiceName, base::Bind(&BrilloAudioClient::OnBASDisconnect,
-                                          weak_ptr_factory_.GetWeakPtr()));
-  if (!service.get()) {
-    LOG(ERROR) << "Could not connect to brillo audio service.";
-    return false;
-  }
-  brillo_audio_service_ = android::interface_cast<IBrilloAudioService>(service);
-  return true;
-}
-
-int BrilloAudioClient::GetDevices(int flag, std::vector<int>& devices) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->GetDevices(flag, &devices);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::SetDevice(audio_policy_force_use_t usage,
-                                 audio_policy_forced_cfg_t config) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->SetDevice(usage, config);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::GetMaxVolumeSteps(BAudioUsage usage, int* max_steps) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->GetMaxVolumeSteps(
-      BrilloAudioClientHelpers::GetStreamType(usage), max_steps);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::SetMaxVolumeSteps(BAudioUsage usage, int max_steps) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->SetMaxVolumeSteps(
-      BrilloAudioClientHelpers::GetStreamType(usage), max_steps);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::SetVolumeIndex(BAudioUsage usage,
-                                      audio_devices_t device,
-                                      int index) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->SetVolumeIndex(
-      BrilloAudioClientHelpers::GetStreamType(usage), device, index);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::GetVolumeIndex(BAudioUsage usage,
-                                      audio_devices_t device,
-                                      int* index) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->GetVolumeIndex(
-      BrilloAudioClientHelpers::GetStreamType(usage), device, index);
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::GetVolumeControlStream(BAudioUsage* usage) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  int stream;
-  auto status = brillo_audio_service_->GetVolumeControlStream(&stream);
-  *usage = BrilloAudioClientHelpers::GetBAudioUsage(
-      static_cast<audio_stream_type_t>(stream));
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::SetVolumeControlStream(BAudioUsage usage) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->SetVolumeControlStream(
-      BrilloAudioClientHelpers::GetStreamType(usage));
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::IncrementVolume() {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->IncrementVolume();
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::DecrementVolume() {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto status = brillo_audio_service_->DecrementVolume();
-  return status.serviceSpecificErrorCode();
-}
-
-int BrilloAudioClient::RegisterAudioCallback(
-    android::sp<AudioServiceCallback> callback, int* callback_id) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  if (!brillo_audio_service_->RegisterServiceCallback(callback).isOk()) {
-    *callback_id = 0;
-    return ECONNABORTED;
-  }
-  for (auto& entry : callback_map_) {
-    if (entry.second->Equals(callback)) {
-      LOG(ERROR) << "Callback has already been registered.";
-      *callback_id = 0;
-      return EINVAL;
-    }
-  }
-  *callback_id = callback_id_counter_++;
-  callback_map_.emplace(*callback_id, callback);
-  return 0;
-}
-
-int BrilloAudioClient::UnregisterAudioCallback(int callback_id) {
-  if (!brillo_audio_service_.get()) {
-    OnBASDisconnect();
-    return ECONNABORTED;
-  }
-  auto callback_elem = callback_map_.find(callback_id);
-  if (callback_elem == callback_map_.end()) {
-    // If we were passed an invalid callback_id, do nothing.
-    LOG(ERROR) << "Unregister called with invalid callback ID.";
-    return EINVAL;
-  }
-  brillo_audio_service_->UnregisterServiceCallback(callback_elem->second.get());
-  callback_map_.erase(callback_elem);
-  return 0;
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/brillo_audio_client.h b/brillo/audio/audioservice/brillo_audio_client.h
deleted file mode 100644 (file)
index 00c431a..0000000
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Client for the brilloaudioservice.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
-
-#include <map>
-#include <memory>
-#include <vector>
-
-#include <base/bind.h>
-#include <base/memory/weak_ptr.h>
-#include <gtest/gtest_prod.h>
-#include <media/IAudioPolicyService.h>
-
-#include "android/brillo/brilloaudioservice/IBrilloAudioService.h"
-#include "audio_service_callback.h"
-
-using android::brillo::brilloaudioservice::IBrilloAudioService;
-
-namespace brillo {
-
-class BrilloAudioClient {
- public:
-  virtual ~BrilloAudioClient();
-
-  // Get or create a pointer to the client instance.
-  //
-  // Returns a weak_ptr to a BrilloAudioClient object.
-  static std::weak_ptr<BrilloAudioClient> GetClientInstance();
-
-  // Query brillo audio service to get list of connected audio devices.
-  //
-  // |flag| is an int which is either GET_DEVICES_INPUTS or GET_DEVICES_OUTPUTS.
-  // |devices| is a reference to a vector of audio_devices_t.
-  //
-  // Returns 0 on success and errno on failure.
-  int GetDevices(int flag, std::vector<int>& devices);
-
-  // Register a callback object with the service.
-  //
-  // |callback| is a ref pointer to a callback object to be register with the
-  // brillo audio service.
-  // |callback_id| is a pointer to an int that represents a callback id token on
-  // success and 0 on failure.
-  //
-  // Returns 0 on success and errno on failure.
-  int RegisterAudioCallback(android::sp<AudioServiceCallback> callback,
-                            int* callback_id);
-
-  // Unregister a callback object with the service.
-  //
-  // |callback_id| is an int referring to the callback object.
-  //
-  // Returns 0 on success and errno on failure.
-  int UnregisterAudioCallback(int callback_id);
-
-  // Set a device to be the default. This does not communicate with the brillo
-  // audio service but instead communicates directly with the audio policy
-  // service.
-  //
-  // Please see system/audio_policy.h for details on these arguments.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetDevice(audio_policy_force_use_t usage,
-                audio_policy_forced_cfg_t config);
-
-  // Get the maximum number of steps for a given BAudioUsage.
-  //
-  // |usage| is an enum of type BAudioUsage.
-  // |max_steps| is a pointer to the maximum number of steps.
-  //
-  // Returns 0 on success and errno on failure.
-  int GetMaxVolumeSteps(BAudioUsage usage, int* max_steps);
-
-  // Set the maximum number of steps to use for a given BAudioUsage.
-  //
-  // |usage| is an enum of type BAudioUsage.
-  // |max_steps| is an int between 0 and 100.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetMaxVolumeSteps(BAudioUsage usage, int max_steps);
-
-  // Set the volume index for a given BAudioUsage and device.
-  //
-  // |usage| is an enum of type BAudioUsage.
-  // |device| is of type audio_devices_t.
-  // |index| is an int representing the current index.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetVolumeIndex(BAudioUsage usage, audio_devices_t device, int index);
-
-  // Get the volume index for a given BAudioUsage and device.
-  //
-  // |usage| is an enum of type BAudioUsage.
-  // |device| is of type audio_devices_t.
-  // |index| is a pointer to an int representing the current index.
-  //
-  // Returns 0 on success and errno on failure.
-  int GetVolumeIndex(BAudioUsage usage, audio_devices_t device, int* index);
-
-  // Get default stream to use for volume buttons.
-  //
-  // |usage| is a pointer to a BAudioUsage.
-  //
-  // Returns 0 on success and errno on failure.
-  int GetVolumeControlStream(BAudioUsage* usage);
-
-  // Set default stream to use for volume buttons.
-  //
-  // |usage| is an enum of type BAudioUsage.
-  //
-  // Returns 0 on success and errno on failure.
-  int SetVolumeControlStream(BAudioUsage usage);
-
-  // Increment the volume.
-  //
-  // Returns 0 on success and errno on failure.
-  int IncrementVolume();
-
-  // Decrement the volume.
-  //
-  // Returns 0 on success and errno on failure.
-  int DecrementVolume();
-
- protected:
-  BrilloAudioClient() = default;
-
- private:
-  friend class BrilloAudioClientTest;
-  FRIEND_TEST(BrilloAudioClientTest, InitializeNoService);
-  FRIEND_TEST(BrilloAudioClientTest,
-              CheckInitializeRegistersForDeathNotifications);
-
-  // Initialize the BrilloAudioClient object and connects to the brillo audio
-  // service and the audio policy service. It also registers for death
-  // notifications.
-  bool Initialize();
-
-  // Callback to be triggered when the brillo audio service dies. It attempts to
-  // reconnect to the service.
-  virtual void OnBASDisconnect();
-
-  // Helper method to connect to a service and register a callback to receive
-  // death notifications.
-  //
-  // |service_name| is a string representing the name of the service.
-  // |callback| is a base::Closure which will be called if the service dies.
-  android::sp<android::IBinder> ConnectToService(const std::string& service_name,
-                                                 const base::Closure& callback);
-
-  // Pointer to the BrilloAudioClient object.
-  static std::shared_ptr<BrilloAudioClient> instance_;
-
-  // Used to generate weak_ptr to BrilloAudioClient for use in base::Bind.
-  base::WeakPtrFactory<BrilloAudioClient> weak_ptr_factory_{this};
-  // Pointer to the brillo audio service.
-  android::sp<IBrilloAudioService> brillo_audio_service_;
-  // Counter for callback IDs.
-  static int callback_id_counter_;
-  // Map of callback ids to callback objects.
-  std::map<int, android::sp<AudioServiceCallback> > callback_map_;
-
-  DISALLOW_COPY_AND_ASSIGN(BrilloAudioClient);
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_H_
diff --git a/brillo/audio/audioservice/brillo_audio_client_helpers.cpp b/brillo/audio/audioservice/brillo_audio_client_helpers.cpp
deleted file mode 100644 (file)
index 871c7a9..0000000
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#include "brillo_audio_client_helpers.h"
-
-namespace brillo {
-
-audio_policy_force_use_t BrilloAudioClientHelpers::GetForceUse(
-    BAudioUsage usage) {
-  if (usage == kUsageMedia)
-    return AUDIO_POLICY_FORCE_FOR_MEDIA;
-  else
-    return AUDIO_POLICY_FORCE_FOR_SYSTEM;
-}
-
-audio_stream_type_t BrilloAudioClientHelpers::GetStreamType(BAudioUsage usage) {
-  switch (usage) {
-    case kUsageAlarm:
-      return AUDIO_STREAM_ALARM;
-    case kUsageMedia:
-      return AUDIO_STREAM_MUSIC;
-    case kUsageNotifications:
-      return AUDIO_STREAM_NOTIFICATION;
-    case kUsageSystem:
-      return AUDIO_STREAM_SYSTEM;
-    default:
-      return AUDIO_STREAM_DEFAULT;
-  }
-}
-
-BAudioUsage BrilloAudioClientHelpers::GetBAudioUsage(
-    audio_stream_type_t stream) {
-  switch (stream) {
-    case AUDIO_STREAM_ALARM:
-      return kUsageAlarm;
-    case AUDIO_STREAM_MUSIC:
-      return kUsageMedia;
-    case AUDIO_STREAM_NOTIFICATION:
-      return kUsageNotifications;
-    case AUDIO_STREAM_SYSTEM:
-      return kUsageSystem;
-    default:
-      return kUsageInvalid;
-  }
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/brillo_audio_client_helpers.h b/brillo/audio/audioservice/brillo_audio_client_helpers.h
deleted file mode 100644 (file)
index a5bb7ba..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Helpers for the brillo audio client.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
-
-#include <gtest/gtest_prod.h>
-#include <system/audio.h>
-#include <system/audio_policy.h>
-
-#include "include/brillo_audio_manager.h"
-
-namespace brillo {
-
-class BrilloAudioClientHelpers {
- public:
-  static audio_policy_force_use_t GetForceUse(BAudioUsage usage);
-  static audio_stream_type_t GetStreamType(BAudioUsage usage);
-  static BAudioUsage GetBAudioUsage(audio_stream_type_t stream);
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_HELPERS_H_
diff --git a/brillo/audio/audioservice/brillo_audio_device_info.cpp b/brillo/audio/audioservice/brillo_audio_device_info.cpp
deleted file mode 100644 (file)
index 611bcc5..0000000
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of brillo_audio_device_info.h.
-
-#include "include/brillo_audio_device_info.h"
-
-#include "brillo_audio_device_info_def.h"
-#include "brillo_audio_device_info_internal.h"
-
-using brillo::BAudioDeviceInfoInternal;
-
-BAudioDeviceInfo* BAudioDeviceInfo_new(int device) {
-  BAudioDeviceInfo* audio_device_info = new BAudioDeviceInfo;
-  audio_device_info->internal_ =
-      std::make_unique<BAudioDeviceInfoInternal>(device);
-  return audio_device_info;
-}
-
-int BAudioDeviceInfo_getType(BAudioDeviceInfo* device) {
-  return device->internal_->GetDeviceId();
-}
-
-void BAudioDeviceInfo_delete(BAudioDeviceInfo* device) {
-  delete device;
-}
diff --git a/brillo/audio/audioservice/brillo_audio_device_info_def.h b/brillo/audio/audioservice/brillo_audio_device_info_def.h
deleted file mode 100644 (file)
index 3bf1f66..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Definition of BAudioDeviceInfo.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
-
-
-#include <memory>
-
-#include "brillo_audio_device_info_internal.h"
-#include "include/brillo_audio_device_info.h"
-
-using brillo::BAudioDeviceInfoInternal;
-
-struct BAudioDeviceInfo {
-  std::unique_ptr<BAudioDeviceInfoInternal> internal_;
-};
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_DEF_H_
diff --git a/brillo/audio/audioservice/brillo_audio_device_info_internal.cpp b/brillo/audio/audioservice/brillo_audio_device_info_internal.cpp
deleted file mode 100644 (file)
index 215da21..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Internal helpers for BAudioDeviceInfo.
-
-#include "brillo_audio_device_info_internal.h"
-
-#include <base/logging.h>
-
-#include "brillo_audio_device_info_def.h"
-
-namespace brillo {
-
-BAudioDeviceInfoInternal::BAudioDeviceInfoInternal(int device_id) {
-  device_id_ = device_id;
-}
-
-int BAudioDeviceInfoInternal::GetDeviceId() {
-  return device_id_;
-}
-
-audio_policy_forced_cfg_t BAudioDeviceInfoInternal::GetConfig() {
-  switch (device_id_) {
-    case TYPE_BUILTIN_SPEAKER:
-      return AUDIO_POLICY_FORCE_SPEAKER;
-    case TYPE_WIRED_HEADSET:
-      return AUDIO_POLICY_FORCE_HEADPHONES;
-    case TYPE_WIRED_HEADSET_MIC:
-      return AUDIO_POLICY_FORCE_HEADPHONES;
-    case TYPE_WIRED_HEADPHONES:
-      return AUDIO_POLICY_FORCE_HEADPHONES;
-    case TYPE_BUILTIN_MIC:
-      return AUDIO_POLICY_FORCE_NONE;
-    default:
-      return AUDIO_POLICY_FORCE_NONE;
-  }
-}
-
-audio_devices_t BAudioDeviceInfoInternal::GetAudioDevicesT() {
-  switch (device_id_) {
-    case TYPE_BUILTIN_SPEAKER:
-      return AUDIO_DEVICE_OUT_SPEAKER;
-    case TYPE_WIRED_HEADSET:
-      return AUDIO_DEVICE_OUT_WIRED_HEADSET;
-    case TYPE_WIRED_HEADSET_MIC:
-      return AUDIO_DEVICE_IN_WIRED_HEADSET;
-    case TYPE_WIRED_HEADPHONES:
-      return AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
-    case TYPE_BUILTIN_MIC:
-      return AUDIO_DEVICE_IN_BUILTIN_MIC;
-    default:
-      return AUDIO_DEVICE_NONE;
-  }
-}
-
-BAudioDeviceInfoInternal* BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
-    unsigned int device) {
-  int device_id = TYPE_UNKNOWN;
-  switch (device) {
-    case AUDIO_DEVICE_OUT_WIRED_HEADSET:
-      device_id = TYPE_WIRED_HEADSET;
-      break;
-    case AUDIO_DEVICE_OUT_WIRED_HEADPHONE:
-      device_id = TYPE_WIRED_HEADPHONES;
-      break;
-    case AUDIO_DEVICE_IN_WIRED_HEADSET:
-      device_id = TYPE_WIRED_HEADSET_MIC;
-      break;
-  }
-  if (device_id == TYPE_UNKNOWN) {
-    LOG(ERROR) << "Unsupported device.";
-    return nullptr;
-  }
-  return new BAudioDeviceInfoInternal(device_id);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/brillo_audio_device_info_internal.h b/brillo/audio/audioservice/brillo_audio_device_info_internal.h
deleted file mode 100644 (file)
index 2e60c6f..0000000
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Internal class to represent BAudioDeviceInfo.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
-
-#include <vector>
-
-#include <gtest/gtest_prod.h>
-#include <hardware/audio_policy.h>
-
-#include "include/brillo_audio_device_info.h"
-
-namespace brillo {
-
-class BAudioDeviceInfoInternal {
- public:
-  // Constructor for BAudioDeviceInfoInternal.
-  //
-  // |device_id| is an integer representing an audio device type as defined in
-  // brillo_audio_device_info.h.
-  explicit BAudioDeviceInfoInternal(int device_id);
-
-  // Get audio policy config.
-  //
-  // Returns an audio_policy_forced_cfg_t.
-  audio_policy_forced_cfg_t GetConfig();
-
-  // Create a BAudioDeviceInfoInternal object from a audio_devices_t device
-  // type.
-  //
-  // |devices_t| is an audio device of type audio_devices_t which is represented
-  // using an int.
-  //
-  // Returns a pointer to a BAudioDeviceInfoInternal that has been created.
-  static BAudioDeviceInfoInternal* CreateFromAudioDevicesT(unsigned int device);
-
-  // Get the device id.
-  //
-  // Returns an int which is the device_id.
-  int GetDeviceId();
-
-  // Get audio_devices_t that corresponds to device_id;
-  //
-  // Returns an audio_devices_t.
-  audio_devices_t GetAudioDevicesT();
-
- private:
-  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, InWiredHeadset);
-  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadset);
-  FRIEND_TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadphone);
-
-  // An int representing the underlying audio device. The int is one of the
-  // constants defined in brillo_audio_device_info.h.
-  int device_id_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_INTERNAL_H_
diff --git a/brillo/audio/audioservice/brillo_audio_manager.cpp b/brillo/audio/audioservice/brillo_audio_manager.cpp
deleted file mode 100644 (file)
index 4c09824..0000000
+++ /dev/null
@@ -1,227 +0,0 @@
-  // Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of brillo_audio_manager.h.
-
-#include "include/brillo_audio_manager.h"
-
-#include <memory>
-#include <stdlib.h>
-
-#include "audio_service_callback.h"
-#include "brillo_audio_client.h"
-#include "brillo_audio_client_helpers.h"
-#include "brillo_audio_device_info_def.h"
-#include "brillo_audio_device_info_internal.h"
-
-using brillo::AudioServiceCallback;
-using brillo::BrilloAudioClient;
-using brillo::BrilloAudioClientHelpers;
-
-struct BAudioManager {
-  std::weak_ptr<BrilloAudioClient> client_;
-};
-
-BAudioManager* BAudioManager_new() {
-  auto client = BrilloAudioClient::GetClientInstance();
-  if (!client.lock())
-    return nullptr;
-  BAudioManager* bam = new BAudioManager;
-  bam->client_ = client;
-  return bam;
-}
-
-int BAudioManager_getDevices(
-    const BAudioManager* brillo_audio_manager, int flag,
-    BAudioDeviceInfo* device_array[], unsigned int size,
-    unsigned int* num_devices) {
-  if (!brillo_audio_manager || !num_devices ||
-      (flag != GET_DEVICES_INPUTS && flag != GET_DEVICES_OUTPUTS))
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    *num_devices = 0;
-    return ECONNABORTED;
-  }
-  std::vector<int> devices;
-  auto rc = client->GetDevices(flag, devices);
-  if (rc) {
-    *num_devices = 0;
-    return rc;
-  }
-  unsigned int num_elems = (devices.size() < size) ? devices.size() : size;
-  for (size_t i = 0; i < num_elems; i++) {
-    device_array[i] = new BAudioDeviceInfo;
-    device_array[i]->internal_ = std::unique_ptr<BAudioDeviceInfoInternal>(
-        BAudioDeviceInfoInternal::CreateFromAudioDevicesT(devices[i]));
-  }
-  *num_devices = devices.size();
-  return 0;
-}
-
-int BAudioManager_setInputDevice(const BAudioManager* brillo_audio_manager,
-                                 const BAudioDeviceInfo* device) {
-  if (!brillo_audio_manager || !device)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD,
-                           device->internal_->GetConfig());
-}
-
-int BAudioManager_setOutputDevice(
-    const BAudioManager* brillo_audio_manager, const BAudioDeviceInfo* device,
-    BAudioUsage usage) {
-  if (!brillo_audio_manager || !device)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client)
-    return ECONNABORTED;
-  return client->SetDevice(BrilloAudioClientHelpers::GetForceUse(usage),
-                           device->internal_->GetConfig());
-}
-
-int BAudioManager_getMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
-                                    BAudioUsage usage,
-                                    int* max_steps) {
-  if (!brillo_audio_manager || !max_steps)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client)
-    return ECONNABORTED;
-  return client->GetMaxVolumeSteps(usage, max_steps);
-}
-
-int BAudioManager_setMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
-                                    BAudioUsage usage,
-                                    int max_steps) {
-  if (!brillo_audio_manager || max_steps < 0 || max_steps > 100)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client)
-    return ECONNABORTED;
-  return client->SetMaxVolumeSteps(usage, max_steps);
-}
-
-int BAudioManager_setVolumeIndex(const BAudioManager* brillo_audio_manager,
-                                 BAudioUsage usage,
-                                 const BAudioDeviceInfo* device,
-                                 int index) {
-  if (!brillo_audio_manager || !device) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->SetVolumeIndex(
-      usage, device->internal_->GetAudioDevicesT(), index);
-}
-
-int BAudioManager_getVolumeIndex(const BAudioManager* brillo_audio_manager,
-                                 BAudioUsage usage,
-                                 const BAudioDeviceInfo* device,
-                                 int* index) {
-  if (!brillo_audio_manager || !device || !index) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->GetVolumeIndex(
-      usage, device->internal_->GetAudioDevicesT(), index);
-}
-
-int BAudioManager_getVolumeControlUsage(
-    const BAudioManager* brillo_audio_manager, BAudioUsage* usage) {
-  if (!brillo_audio_manager || !usage) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->GetVolumeControlStream(usage);
-}
-
-int BAudioManager_setVolumeControlUsage(
-    const BAudioManager* brillo_audio_manager, BAudioUsage usage) {
-  if (!brillo_audio_manager) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->SetVolumeControlStream(usage);
-}
-
-int BAudioManager_incrementVolume(const BAudioManager* brillo_audio_manager) {
-  if (!brillo_audio_manager) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->IncrementVolume();
-}
-
-int BAudioManager_decrementVolume(const BAudioManager* brillo_audio_manager) {
-  if (!brillo_audio_manager) {
-    return EINVAL;
-  }
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    return ECONNABORTED;
-  }
-  return client->DecrementVolume();
-}
-
-int BAudioManager_registerAudioCallback(
-    const BAudioManager* brillo_audio_manager, const BAudioCallback* callback,
-    void* user_data, int* callback_id) {
-  if (!brillo_audio_manager || !callback || !callback_id)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client) {
-    *callback_id = 0;
-    return ECONNABORTED;
-  }
-  // This copies the BAudioCallback into AudioServiceCallback so the
-  // BAudioCallback can be safely deleted.
-  return client->RegisterAudioCallback(
-      new AudioServiceCallback(callback, user_data), callback_id);
-}
-
-int BAudioManager_unregisterAudioCallback(
-    const BAudioManager* brillo_audio_manager, int callback_id) {
-  if (!brillo_audio_manager)
-    return EINVAL;
-  auto client = brillo_audio_manager->client_.lock();
-  if (!client)
-    return ECONNABORTED;
-  return client->UnregisterAudioCallback(callback_id);
-}
-
-int BAudioManager_delete(BAudioManager* brillo_audio_manager) {
-  if (!brillo_audio_manager)
-    return EINVAL;
-  delete brillo_audio_manager;
-  return 0;
-}
diff --git a/brillo/audio/audioservice/brillo_audio_service.h b/brillo/audio/audioservice/brillo_audio_service.h
deleted file mode 100644 (file)
index 87ca0d7..0000000
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
-
-#include "android/brillo/brilloaudioservice/BnBrilloAudioService.h"
-
-#include <memory>
-#include <set>
-#include <vector>
-
-#include <binder/Status.h>
-
-#include "android/brillo/brilloaudioservice/IAudioServiceCallback.h"
-#include "audio_device_handler.h"
-#include "audio_volume_handler.h"
-
-using android::binder::Status;
-using android::brillo::brilloaudioservice::BnBrilloAudioService;
-using android::brillo::brilloaudioservice::IAudioServiceCallback;
-
-namespace brillo {
-
-class BrilloAudioService : public BnBrilloAudioService {
- public:
-  virtual ~BrilloAudioService() {}
-
-  // From AIDL.
-  virtual Status GetDevices(int flag, std::vector<int>* _aidl_return) = 0;
-  virtual Status SetDevice(int usage, int config) = 0;
-  virtual Status GetMaxVolumeSteps(int stream, int* _aidl_return) = 0;
-  virtual Status SetMaxVolumeSteps(int stream, int max_steps) = 0;
-  virtual Status SetVolumeIndex(int stream, int device, int index) = 0;
-  virtual Status GetVolumeIndex(int stream, int device, int* _aidl_return) = 0;
-  virtual Status GetVolumeControlStream(int* _aidl_return) = 0;
-  virtual Status SetVolumeControlStream(int stream) = 0;
-  virtual Status IncrementVolume() = 0;
-  virtual Status DecrementVolume() = 0;
-  virtual Status RegisterServiceCallback(
-      const android::sp<IAudioServiceCallback>& callback) = 0;
-  virtual Status UnregisterServiceCallback(
-      const android::sp<IAudioServiceCallback>& callback) = 0;
-
-  // Register daemon handlers.
-  //
-  // |audio_device_handler| is a weak pointer to an audio device handler object.
-  // |audio_volume_handler| is a weak pointer to an audio volume handler object.
-  virtual void RegisterHandlers(
-      std::weak_ptr<AudioDeviceHandler> audio_device_handler,
-      std::weak_ptr<AudioVolumeHandler> audio_volume_handler) = 0;
-
-  // Callback to be called when a device is connected.
-  //
-  // |devices| is a vector of ints representing the audio_devices_t.
-  virtual void OnDevicesConnected(const std::vector<int>& device) = 0;
-
-  // Callback to be called when a device is disconnected.
-  //
-  // |devices| is a vector of ints representing the audio_devices_t.
-  virtual void OnDevicesDisconnected(const std::vector<int>& device) = 0;
-
-  // Callback to be called when the volume is changed.
-  //
-  // |stream| is an audio_stream_type_t representing the stream.
-  // |previous_index| is the volume index before the key press.
-  // |current_index| is the volume index after the key press.
-  virtual void OnVolumeChanged(audio_stream_type_t stream,
-                               int previous_index,
-                               int current_index) = 0;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_H_
diff --git a/brillo/audio/audioservice/brillo_audio_service_impl.cpp b/brillo/audio/audioservice/brillo_audio_service_impl.cpp
deleted file mode 100644 (file)
index 1585755..0000000
+++ /dev/null
@@ -1,193 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Implementation of brillo_audio_service_impl.h
-
-#include "brillo_audio_service_impl.h"
-
-using android::binder::Status;
-
-namespace brillo {
-
-Status BrilloAudioServiceImpl::GetDevices(int flag,
-                                          std::vector<int>* _aidl_return) {
-  auto device_handler = audio_device_handler_.lock();
-  if (!device_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio device handler died."));
-  }
-  if (flag == BrilloAudioService::GET_DEVICES_INPUTS) {
-    device_handler->GetInputDevices(_aidl_return);
-  } else if (flag == BrilloAudioService::GET_DEVICES_OUTPUTS) {
-    device_handler->GetOutputDevices(_aidl_return);
-  } else {
-    return Status::fromServiceSpecificError(EINVAL,
-                                            android::String8("Invalid flag."));
-  }
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::SetDevice(int usage, int config) {
-  auto device_handler = audio_device_handler_.lock();
-  if (!device_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio device handler died."));
-  }
-  int rc =
-      device_handler->SetDevice(static_cast<audio_policy_force_use_t>(usage),
-                                static_cast<audio_policy_forced_cfg_t>(config));
-  if (rc) return Status::fromServiceSpecificError(rc);
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::RegisterServiceCallback(
-    const android::sp<IAudioServiceCallback>& callback) {
-  callbacks_set_.insert(callback);
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::UnregisterServiceCallback(
-    const android::sp<IAudioServiceCallback>& callback) {
-  callbacks_set_.erase(callback);
-  return Status::ok();
-}
-
-void BrilloAudioServiceImpl::RegisterHandlers(
-    std::weak_ptr<AudioDeviceHandler> audio_device_handler,
-    std::weak_ptr<AudioVolumeHandler> audio_volume_handler) {
-  audio_device_handler_ = audio_device_handler;
-  audio_volume_handler_ = audio_volume_handler;
-}
-
-Status BrilloAudioServiceImpl::GetMaxVolumeSteps(int stream,
-                                                 int* _aidl_return) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  *_aidl_return = volume_handler->GetVolumeMaxSteps(
-      static_cast<audio_stream_type_t>(stream));
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::SetMaxVolumeSteps(int stream, int max_steps) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  int rc = volume_handler->SetVolumeMaxSteps(
-      static_cast<audio_stream_type_t>(stream), max_steps);
-  if (rc)
-    return Status::fromServiceSpecificError(rc);
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::SetVolumeIndex(int stream,
-                                              int device,
-                                              int index) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  int rc =
-      volume_handler->SetVolumeIndex(static_cast<audio_stream_type_t>(stream),
-                                     static_cast<audio_devices_t>(device),
-                                     index);
-  if (rc)
-    return Status::fromServiceSpecificError(rc);
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::GetVolumeIndex(int stream,
-                                              int device,
-                                              int* _aidl_return) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  *_aidl_return =
-      volume_handler->GetVolumeIndex(static_cast<audio_stream_type_t>(stream),
-                                     static_cast<audio_devices_t>(device));
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::IncrementVolume() {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  volume_handler->AdjustVolumeActiveStreams(1);
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::GetVolumeControlStream(int* _aidl_return) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  *_aidl_return = volume_handler->GetVolumeControlStream();
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::SetVolumeControlStream(int stream) {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  volume_handler->SetVolumeControlStream(
-      static_cast<audio_stream_type_t>(stream));
-  return Status::ok();
-}
-
-Status BrilloAudioServiceImpl::DecrementVolume() {
-  auto volume_handler = audio_volume_handler_.lock();
-  if (!volume_handler) {
-    return Status::fromServiceSpecificError(
-        EREMOTEIO, android::String8("The audio volume handler died."));
-  }
-  volume_handler->AdjustVolumeActiveStreams(-1);
-  return Status::ok();
-}
-
-void BrilloAudioServiceImpl::OnDevicesConnected(
-    const std::vector<int>& devices) {
-  for (const auto& callback : callbacks_set_) {
-    callback->OnAudioDevicesConnected(devices);
-  }
-}
-
-void BrilloAudioServiceImpl::OnDevicesDisconnected(
-    const std::vector<int>& devices) {
-  for (const auto& callback : callbacks_set_) {
-    callback->OnAudioDevicesDisconnected(devices);
-  }
-}
-
-void BrilloAudioServiceImpl::OnVolumeChanged(audio_stream_type_t stream,
-                                             int previous_index,
-                                             int current_index) {
-  for (const auto& callback : callbacks_set_) {
-    callback->OnVolumeChanged(stream, previous_index, current_index);
-  }
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/brillo_audio_service_impl.h b/brillo/audio/audioservice/brillo_audio_service_impl.h
deleted file mode 100644 (file)
index af53b66..0000000
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
-
-// Server side implementation of brillo audio service.
-
-#include "brillo_audio_service.h"
-
-namespace brillo {
-
-class BrilloAudioServiceImpl : public BrilloAudioService {
- public:
-  ~BrilloAudioServiceImpl() = default;
-
-  // From AIDL.
-  Status GetDevices(int flag, std::vector<int>* _aidl_return) override;
-  Status SetDevice(int usage, int config) override;
-  Status GetMaxVolumeSteps(int stream, int* _aidl_return) override;
-  Status SetMaxVolumeSteps(int stream, int max_steps) override;
-  Status SetVolumeIndex(int stream, int device, int index) override;
-  Status GetVolumeIndex(int stream, int device, int* _aidl_return) override;
-  Status GetVolumeControlStream(int* _aidl_return) override;
-  Status SetVolumeControlStream(int stream) override;
-  Status IncrementVolume() override;
-  Status DecrementVolume() override;
-  Status RegisterServiceCallback(
-      const android::sp<IAudioServiceCallback>& callback) override;
-  Status UnregisterServiceCallback(
-      const android::sp<IAudioServiceCallback>& callback) override;
-
-  // Register daemon handlers.
-  //
-  // |audio_device_handler| is a weak pointer to an audio device handler object.
-  // |audio_volume_handler| is a weak pointer to an audio volume handler object.
-  void RegisterHandlers(
-      std::weak_ptr<AudioDeviceHandler> audio_device_handler,
-      std::weak_ptr<AudioVolumeHandler> audio_volume_handler) override;
-
-  // Callback to be called when a device is connected.
-  //
-  // |devices| is a vector of ints representing the audio_devices_t.
-  void OnDevicesConnected(const std::vector<int>& device) override;
-
-  // Callback to be called when a device is disconnected.
-  //
-  // |devices| is a vector of ints representing the audio_devices_t.
-  void OnDevicesDisconnected(const std::vector<int>& device) override;
-
-  // Callback to be called when volume is changed.
-  //
-  // |stream| is an int representing the stream.
-  // |previous_index| is the volume index before the key press.
-  // |current_index| is the volume index after the key press.
-  void OnVolumeChanged(audio_stream_type_t stream,
-                       int previous_index,
-                       int current_index) override;
-
- private:
-  // A weak pointer to the audio device handler.
-  std::weak_ptr<AudioDeviceHandler> audio_device_handler_;
-  // A weak pointer to the audio volume handler.
-  std::weak_ptr<AudioVolumeHandler> audio_volume_handler_;
-  // List of all callbacks objects registered with the service.
-  std::set<android::sp<IAudioServiceCallback> > callbacks_set_;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_IMPL_H_
diff --git a/brillo/audio/audioservice/brilloaudioserv.rc b/brillo/audio/audioservice/brilloaudioserv.rc
deleted file mode 100644 (file)
index 0595c33..0000000
+++ /dev/null
@@ -1,4 +0,0 @@
-service brilloaudioserv /system/bin/brilloaudioservice
-    class late_start
-    user audioserver
-    group input
diff --git a/brillo/audio/audioservice/include/brillo_audio_device_info.h b/brillo/audio/audioservice/include/brillo_audio_device_info.h
deleted file mode 100644 (file)
index 5c386b4..0000000
+++ /dev/null
@@ -1,74 +0,0 @@
-// copyright 2016 the android open source project
-//
-// licensed under the apache license, version 2.0 (the "license");
-// you may not use this file except in compliance with the license.
-// you may obtain a copy of the license at
-//
-//      http://www.apache.org/licenses/license-2.0
-//
-// unless required by applicable law or agreed to in writing, software
-// distributed under the license is distributed on an "as is" basis,
-// without warranties or conditions of any kind, either express or implied.
-// see the license for the specific language governing permissions and
-// limitations under the license.
-//
-
-// Type to represent audio devices in a brillo system.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
-
-#include <sys/cdefs.h>
-
-__BEGIN_DECLS
-
-struct BAudioDeviceInfo;
-
-typedef struct BAudioDeviceInfo BAudioDeviceInfo;
-
-// A device type associated with an unknown or uninitialized device.
-static const int TYPE_UNKNOWN = 0;
-
-// A device type describing the speaker system (i.e. a mono speaker or stereo
-// speakers) built in a device.
-static const int TYPE_BUILTIN_SPEAKER = 1;
-
-// A device type describing a headset, which is the combination of a headphones
-// and microphone. This type represents just the transducer in the headset.
-static const int TYPE_WIRED_HEADSET = 2;
-
-// A device type describing a headset, which is the combination of a headphones
-// and microphone. This type represents the microphone in the headset.
-static const int TYPE_WIRED_HEADSET_MIC = 3;
-
-// A device type describing a pair of wired headphones.
-static const int TYPE_WIRED_HEADPHONES = 4;
-
-// A device type describing the microphone(s) built in a device.
-static const int TYPE_BUILTIN_MIC = 5;
-
-// Create a BAudioDeviceInfo based on a type described above.
-//
-// Arg:
-//   device: An int representing an audio type as defined above.
-//
-// Returns a pointer to a BAudioDeviceInfo object.
-BAudioDeviceInfo* BAudioDeviceInfo_new(int device);
-
-// Get the type of the device.
-//
-// Arg:
-//   device: A pointer to a BAudioDeviceInfo object to be freed.
-//
-// Returns an int representing the type of the device.
-int BAudioDeviceInfo_getType(BAudioDeviceInfo* device);
-
-// Free a BAudioDeviceInfo.
-//
-// Arg:
-//   device: A pointer to a BAudioDeviceInfo object to be freed.
-void BAudioDeviceInfo_delete(BAudioDeviceInfo* device);
-
-__END_DECLS
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_DEVICE_INFO_H_
diff --git a/brillo/audio/audioservice/include/brillo_audio_manager.h b/brillo/audio/audioservice/include/brillo_audio_manager.h
deleted file mode 100644 (file)
index ff80daa..0000000
+++ /dev/null
@@ -1,258 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Class to manage audio devices in Brillo.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
-
-#include <sys/cdefs.h>
-
-#include "brillo_audio_device_info.h"
-
-__BEGIN_DECLS
-
-struct BAudioManager;
-
-typedef struct BAudioManager BAudioManager;
-
-// Get a pointer to a BAudioManager. This object will refer to the same
-// underlying client object no matter how many times it is called.
-//
-// Returns a pointer to a BAudioManager. Returns NULL on failure.
-BAudioManager* BAudioManager_new();
-
-// Flag to get input devices.
-static const int GET_DEVICES_INPUTS = 1;
-// Flag to get output devices.
-static const int GET_DEVICES_OUTPUTS = 2;
-
-// Returns the list of input/output devices connected to the system.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager.
-//   flag: Either GET_DEVICES_INPUTS or GET_DEVICES_OUTPUTS.
-//   device_array: An array of BAudioDeviceInfo pointers. The caller has to
-//                 allocate this array.
-//   size: The size of device_array.
-//   num_devices: A pointer to an unsigned int which will represent the number
-//                of audio devices connected to the device.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_getDevices(
-    const BAudioManager* brillo_audio_manager, int flag,
-    BAudioDeviceInfo* device_array[], unsigned int size,
-    unsigned int* num_devices);
-
-// Select the input device to be used for recording.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager.
-//   device: Device to set as the input device. Note that the device has to be
-//           an input device.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_setInputDevice(const BAudioManager* brillo_audio_manager,
-                                 const BAudioDeviceInfo* device);
-
-// Usage types.
-enum BAudioUsage {
-  kUsageAlarm,
-  kUsageMedia,
-  kUsageNotifications,
-  kUsageSystem,
-  kUsageInvalid
-};
-
-// Select the output device to be used for playback.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager.
-//   device: Device to set as the output device. Note that the device has to
-//           be an output device.
-//   usage: A BAudioUsage type representing a usage to route to |device|.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_setOutputDevice(
-    const BAudioManager* brillo_audio_manager, const BAudioDeviceInfo* device,
-    BAudioUsage usage);
-
-// Get the number of steps for a given stream type.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//   usage: A BAudioUsage representing the audio stream.
-//   max_steps: A pointer to an int representing the number of steps for a given
-//              usage.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_getMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
-                                    BAudioUsage usage,
-                                    int* max_steps);
-
-// Set the number of steps for a given stream type.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//   usage: A BAudioUsage representing the audio stream.
-//   max_steps: An int representing the number of steps to use for a given
-//              usage.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_setMaxVolumeSteps(const BAudioManager* brillo_audio_manager,
-                                    BAudioUsage usage,
-                                    int max_steps);
-
-// Set the volume for a given stream type.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//   usage: A BAudioUsage representing the audio stream.
-//   device: A pointer to a BAudioDeviceInfo object.
-//   value: An int representing the index to set the volume to. The index must
-//           be less than max_steps if BAudioManager_setMaxVolumeSteps was
-//           called or 100 otherwise.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_setVolumeIndex(const BAudioManager* brillo_audio_manager,
-                                 BAudioUsage usage,
-                                 const BAudioDeviceInfo* device,
-                                 int index);
-
-// Get the volume for a given stream type.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//   usage: A BAudioUsage representing the audio stream.
-//   device: A pointer to a BAudioDeviceInfo object.
-//   value: A pointer to int. This will be set to an int representing the volume
-//          index for |usage|.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_getVolumeIndex(const BAudioManager* brillo_audio_manager,
-                                 BAudioUsage usage,
-                                 const BAudioDeviceInfo* device,
-                                 int* index);
-
-// Get the default stream for volume buttons. If
-// BAudioManager_setVolumeControlUsage has not been called, this will return
-// kInvalidUsage.
-//
-// Args:
-//  brillo_audio_manager: A pointer to a BAudioManager object.
-//  usage: A pointer to a BAudioUsage representing the audio stream.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_getVolumeControlUsage(
-    const BAudioManager* brillo_audio_manager, BAudioUsage* usage);
-
-// Set the default stream to use for volume buttons. By default, streams will be
-// ordered by priority:
-//   1. kUsageAlarm
-//   2. kUsageNotifications
-//   3. kUsageSystem
-//   4. kUsageMedia
-//
-// Calling BAudioMananager_setVolumeControlUsage with kInvalidUsage will reset
-// the volume control stream to its default priorities and undo the effects of
-// previous calls to BAudioManager_setVolumeControlUsage.
-//
-// Args:
-//  brillo_audio_manager: A pointer to a BAudioManager object.
-//  usage: A BAudioUsage representing the audio stream.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_setVolumeControlUsage(
-    const BAudioManager* brillo_audio_manager, BAudioUsage usage);
-
-// Increment the volume of active streams or stream selected using
-// BAudioManager_setVolumeControlUsage.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_incrementVolume(const BAudioManager* brillo_audio_manager);
-
-// Decrement the volume of active streams or stream selected using
-// BAudioManager_setVolumeControlUsage.
-//
-// Args:
-//   brillo_audio_manager: A pointer to a BAudioManager object.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_decrementVolume(const BAudioManager* brillo_audio_manager);
-
-// Object used for callbacks.
-struct BAudioCallback {
-  // Function to be called when an audio device is added. If multiple audio
-  // devices are added, then this function will be called multiple times. The
-  // user is not responsible for freeing added_device.
-  void (*OnAudioDeviceAdded)(const BAudioDeviceInfo* added_device,
-                             void* user_data);
-
-  // Function to be called when an audio device is removed. If multiple audio
-  // devices are removed, then this function will be called multiple times. The
-  // user is not responsible for freeing removed_device.
-  void (*OnAudioDeviceRemoved)(const BAudioDeviceInfo* removed_device,
-                               void* user_data);
-
-  // Function to be called when the volume button is pressed.
-  void (*OnVolumeChanged)(BAudioUsage usage,
-                          int old_volume_index,
-                          int new_volume_index,
-                          void* user_data);
-};
-
-typedef struct BAudioCallback BAudioCallback;
-
-// Registers a callback object that lets clients know when audio devices have
-// been added/removed from the system.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager.
-//   callback: An object of type BAudioCallback. The BAudioManager
-//             maintains ownership of this object.
-//   user_data : A pointer to user data. This is not used by BAudioManager and
-//               is passed as an arg to callbacks.
-//   callback_id: A pointer to an int. The int represents a token that can be
-//                used to de-register this callback. Contains 0 on failure.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_registerAudioCallback(
-    const BAudioManager* brillo_audio_manager, const BAudioCallback* callback,
-    void* user_data, int* callback_id);
-
-// Unregisters a callback object.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager.
-//   callback_id: A token correspoding to the callback object.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_unregisterAudioCallback(
-    const BAudioManager* brillo_audio_manager, int callback_id);
-
-// Free a Brillo audio manager object.
-//
-// Arg:
-//   brillo_audio_manager: A pointer to a BAudioManager to be freed.
-//
-// Returns 0 on success and errno on failure.
-int BAudioManager_delete(BAudioManager* brillo_audio_manager);
-
-__END_DECLS
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_MANAGER_H_
diff --git a/brillo/audio/audioservice/main_audio_service.cpp b/brillo/audio/audioservice/main_audio_service.cpp
deleted file mode 100644 (file)
index e8cb605..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#include <brillo/flag_helper.h>
-#include <brillo/syslog_logging.h>
-
-#include "audio_daemon.h"
-
-int main(int argc, char** argv) {
-  brillo::FlagHelper::Init(argc, argv, "Brillo audio service,");
-  brillo::InitLog(brillo::kLogToSyslog | brillo::kLogHeader);
-  LOG(INFO) << "Starting brilloaudioservice.";
-  brillo::AudioDaemon audio_daemon;
-  return audio_daemon.Run();
-}
diff --git a/brillo/audio/audioservice/test/audio_daemon_mock.h b/brillo/audio/audioservice/test/audio_daemon_mock.h
deleted file mode 100644 (file)
index c5ed43e..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Mock of audio daemon.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
-
-#include <gmock/gmock.h>
-#include <gtest/gtest_prod.h>
-
-#include "audio_daemon.h"
-
-namespace brillo {
-
-class AudioDaemonMock : public AudioDaemon {
- public:
-  AudioDaemonMock() = default;
-  ~AudioDaemonMock() {}
-
- private:
-  friend class AudioDaemonTest;
-  FRIEND_TEST(AudioDaemonTest, RegisterService);
-  FRIEND_TEST(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce);
-  FRIEND_TEST(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL);
-
-  MOCK_METHOD0(InitializeHandlers, void());
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DAEMON_MOCK_H_
diff --git a/brillo/audio/audioservice/test/audio_daemon_test.cpp b/brillo/audio/audioservice/test/audio_daemon_test.cpp
deleted file mode 100644 (file)
index 3ff5482..0000000
+++ /dev/null
@@ -1,68 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for audio daemon.
-
-#include "audio_daemon_mock.h"
-
-#include <memory>
-#include <vector>
-
-#include <binder/Binder.h>
-#include <binderwrapper/binder_test_base.h>
-#include <binderwrapper/stub_binder_wrapper.h>
-#include <gmock/gmock.h>
-
-#include "audio_device_handler_mock.h"
-
-using android::BinderTestBase;
-using android::IInterface;
-using std::make_shared;
-using testing::_;
-using testing::AnyNumber;
-
-namespace brillo {
-
-class AudioDaemonTest : public BinderTestBase {
- public:
-  AudioDaemonMock daemon_;
-  AudioDeviceHandlerMock device_handler_;
-};
-
-TEST_F(AudioDaemonTest, RegisterService) {
-  daemon_.InitializeBrilloAudioService();
-  EXPECT_EQ(daemon_.brillo_audio_service_,
-            binder_wrapper()->GetRegisteredService(
-                "android.brillo.brilloaudioservice.BrilloAudioService"));
-}
-
-TEST_F(AudioDaemonTest, TestAPSConnectInitializesHandlersOnlyOnce) {
-  binder_wrapper()->SetBinderForService("media.audio_policy",
-                                        binder_wrapper()->CreateLocalBinder());
-  daemon_.handlers_initialized_ = false;
-  EXPECT_CALL(daemon_, InitializeHandlers()).Times(1);
-  daemon_.ConnectToAPS();
-}
-
-TEST_F(AudioDaemonTest, TestDeviceCallbackInitializesBASIfNULL) {
-  daemon_.DeviceCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected,
-      std::vector<int>());
-  EXPECT_EQ(daemon_.brillo_audio_service_,
-            binder_wrapper()->GetRegisteredService(
-                "android.brillo.brilloaudioservice.BrilloAudioService"));
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/audio_device_handler_mock.h b/brillo/audio/audioservice/test/audio_device_handler_mock.h
deleted file mode 100644 (file)
index fcc711f..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Mock of AudioDeviceHandler.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
-
-#include <base/files/file_path.h>
-#include <gmock/gmock.h>
-#include <gtest/gtest_prod.h>
-#include <system/audio.h>
-#include <system/audio_policy.h>
-
-#include "audio_device_handler.h"
-
-namespace brillo {
-
-class AudioDeviceHandlerMock : public AudioDeviceHandler {
- public:
-  AudioDeviceHandlerMock() = default;
-  ~AudioDeviceHandlerMock() {}
-
-  // Reset all local data.
-  void Reset() {
-    connected_input_devices_.clear();
-    connected_output_devices_.clear();
-    headphone_ = false;
-    microphone_ = false;
-  }
-
- private:
-  friend class AudioDeviceHandlerTest;
-  FRIEND_TEST(AudioDeviceHandlerTest,
-              DisconnectAllSupportedDevicesCallsDisconnect);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateNone);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitialAudioStateInvalid);
-  FRIEND_TEST(AudioDeviceHandlerTest, InitCallsDisconnect);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventEmpty);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophonePresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphonePresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent);
-  FRIEND_TEST(AudioDeviceHandlerTest, ProcessEventInvalid);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemNone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone);
-  FRIEND_TEST(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset);
-  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceInput);
-  FRIEND_TEST(AudioDeviceHandlerTest, ConnectAudioDeviceOutput);
-  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceInput);
-  FRIEND_TEST(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput);
-
-  MOCK_METHOD2(NotifyAudioPolicyService,
-               void(audio_devices_t device, audio_policy_dev_state_t state));
-  MOCK_METHOD1(TriggerCallback, void(DeviceConnectionState));
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_DEVICE_HANDLER_MOCK_H_
diff --git a/brillo/audio/audioservice/test/audio_device_handler_test.cpp b/brillo/audio/audioservice/test/audio_device_handler_test.cpp
deleted file mode 100644 (file)
index d14faa0..0000000
+++ /dev/null
@@ -1,408 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for audio device handler.
-
-#include "audio_device_handler_mock.h"
-
-#include <string>
-
-#include <base/files/file_path.h>
-#include <base/files/file_util.h>
-#include <base/files/scoped_temp_dir.h>
-#include <base/strings/string_number_conversions.h>
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-using base::FilePath;
-using base::IntToString;
-using base::ScopedTempDir;
-using base::WriteFile;
-using brillo::AudioDeviceHandlerMock;
-using testing::_;
-using testing::AnyNumber;
-using testing::AtLeast;
-
-namespace brillo {
-
-class AudioDeviceHandlerTest : public testing::Test {
- public:
-  void SetUp() override {
-    EXPECT_TRUE(temp_dir_.CreateUniqueTempDir());
-    h2w_file_path_ = temp_dir_.path().Append("h2wstate");
-  }
-
-  void TearDown() override { handler_.Reset(); }
-
-  // Method to store the current state of the audio jack to a file.
-  //
-  // |value| - Value in the h2w file.
-  void WriteToH2WFile(int value) {
-    std::string value_string = IntToString(value);
-    WriteFile(h2w_file_path_, value_string.c_str(), value_string.length());
-  }
-
-  AudioDeviceHandlerMock handler_;
-  FilePath h2w_file_path_;
-
- private:
-  ScopedTempDir temp_dir_;
-};
-
-// Test that DisconnectAllSupportedDevices() calls NotifyAudioPolicyService()
-// the right number of times.
-TEST_F(AudioDeviceHandlerTest, DisconnectAllSupportedDevicesCallsDisconnect) {
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(
-                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(3);
-  handler_.DisconnectAllSupportedDevices();
-  EXPECT_EQ(handler_.changed_devices_.size(), 3);
-}
-
-// Test that Init() calls DisconnectAllSupportedDevices().
-TEST_F(AudioDeviceHandlerTest, InitCallsDisconnectAllSupportedDevices) {
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(
-                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(3);
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected))
-      .Times(AtLeast(1));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(
-                  _, AUDIO_POLICY_DEVICE_STATE_AVAILABLE)).Times(AnyNumber());
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected))
-      .Times(AnyNumber());
-  handler_.Init(nullptr);
-}
-
-// Test GetInitialAudioDeviceState() with just a microphone.
-TEST_F(AudioDeviceHandlerTest, InitialAudioStateMic) {
-  WriteToH2WFile(2);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  handler_.GetInitialAudioDeviceState(h2w_file_path_);
-  EXPECT_NE(
-      handler_.connected_input_devices_.find(AUDIO_DEVICE_IN_WIRED_HEADSET),
-      handler_.connected_input_devices_.end());
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_IN_WIRED_HEADSET);
-}
-
-// Test GetInitialAudioDeviceState() with a headphone.
-TEST_F(AudioDeviceHandlerTest, InitialAudioStateHeadphone) {
-  WriteToH2WFile(1);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  handler_.GetInitialAudioDeviceState(h2w_file_path_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_NE(
-      handler_.connected_output_devices_.find(AUDIO_DEVICE_OUT_WIRED_HEADPHONE),
-      handler_.connected_output_devices_.end());
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
-}
-
-// Test GetInitialAudioDeviceState() with a headset.
-TEST_F(AudioDeviceHandlerTest, InitialAudioStateHeadset) {
-  WriteToH2WFile(3);
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  handler_.GetInitialAudioDeviceState(h2w_file_path_);
-  EXPECT_NE(
-      handler_.connected_input_devices_.find(AUDIO_DEVICE_IN_WIRED_HEADSET),
-      handler_.connected_input_devices_.end());
-  EXPECT_NE(
-      handler_.connected_output_devices_.find(AUDIO_DEVICE_OUT_WIRED_HEADSET),
-      handler_.connected_output_devices_.end());
-  EXPECT_EQ(handler_.changed_devices_.size(), 2);
-}
-
-// Test GetInitialAudioDeviceState() without any devices connected to the audio
-// jack. No need to call NotifyAudioPolicyService() since that's already handled
-// by Init().
-TEST_F(AudioDeviceHandlerTest, InitialAudioStateNone) {
-  WriteToH2WFile(0);
-  EXPECT_CALL(handler_, TriggerCallback(_));
-  handler_.GetInitialAudioDeviceState(h2w_file_path_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 0);
-}
-
-// Test GetInitialAudioDeviceState() with an invalid file. The audio handler
-// should not fail in this case because it should work on boards that don't
-// support audio jacks.
-TEST_F(AudioDeviceHandlerTest, InitialAudioStateInvalid) {
-  FilePath path = h2w_file_path_;
-  handler_.GetInitialAudioDeviceState(h2w_file_path_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-}
-
-// Test ProcessEvent() with an empty input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventEmpty) {
-  struct input_event event;
-  event.type = 0;
-  event.code = 0;
-  event.value = 0;
-  EXPECT_CALL(handler_, TriggerCallback(_));
-  handler_.ProcessEvent(event);
-  EXPECT_FALSE(handler_.headphone_);
-  EXPECT_FALSE(handler_.microphone_);
-}
-
-// Test ProcessEvent() with a microphone present input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventMicrophonePresent) {
-  struct input_event event;
-  event.type = EV_SW;
-  event.code = SW_MICROPHONE_INSERT;
-  event.value = 1;
-  handler_.ProcessEvent(event);
-  EXPECT_FALSE(handler_.headphone_);
-  EXPECT_TRUE(handler_.microphone_);
-}
-
-// Test ProcessEvent() with a headphone present input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventHeadphonePresent) {
-  struct input_event event;
-  event.type = EV_SW;
-  event.code = SW_HEADPHONE_INSERT;
-  event.value = 1;
-  handler_.ProcessEvent(event);
-  EXPECT_TRUE(handler_.headphone_);
-  EXPECT_FALSE(handler_.microphone_);
-}
-
-// Test ProcessEvent() with a microphone not present input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventMicrophoneNotPresent) {
-  struct input_event event;
-  event.type = EV_SW;
-  event.code = SW_MICROPHONE_INSERT;
-  event.value = 0;
-  handler_.ProcessEvent(event);
-  EXPECT_FALSE(handler_.headphone_);
-  EXPECT_FALSE(handler_.microphone_);
-}
-
-// Test ProcessEvent() with a headphone not preset input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventHeadphoneNotPresent) {
-  struct input_event event;
-  event.type = EV_SW;
-  event.code = SW_HEADPHONE_INSERT;
-  event.value = 0;
-  handler_.ProcessEvent(event);
-  EXPECT_FALSE(handler_.headphone_);
-  EXPECT_FALSE(handler_.microphone_);
-}
-
-// Test ProcessEvent() with an unsupported input_event arg.
-TEST_F(AudioDeviceHandlerTest, ProcessEventInvalid) {
-  struct input_event event;
-  event.type = EV_SW;
-  event.code = SW_MAX;
-  event.value = 0;
-  handler_.ProcessEvent(event);
-  EXPECT_FALSE(handler_.headphone_);
-  EXPECT_FALSE(handler_.microphone_);
-}
-
-// Test UpdateAudioSystem() without any devices connected.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemNone) {
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(
-                  _, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)).Times(0);
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.changed_devices_.size(), 0);
-}
-
-// Test UpdateAudioSystem() when disconnecting a microphone.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectMic) {
-  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
-  handler_.connected_input_devices_.insert(device);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-// Test UpdateAudioSystem() when disconnecting a headphone.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadphone) {
-  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADPHONE;
-  handler_.connected_output_devices_.insert(device);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-// Test UpdateAudioSystem() when disconnecting a headset & headphones.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemDisconnectHeadset) {
-  handler_.connected_input_devices_.insert(AUDIO_DEVICE_IN_WIRED_HEADSET);
-  handler_.connected_output_devices_.insert(AUDIO_DEVICE_OUT_WIRED_HEADSET);
-  handler_.connected_output_devices_.insert(AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesDisconnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 3);
-}
-
-// Test UpdateAudioSystem() when connecting a microphone.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectMic) {
-  handler_.microphone_ = true;
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_IN_WIRED_HEADSET);
-}
-
-// Test UpdateAudioSystem() when connecting a headphone.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadphone) {
-  handler_.headphone_ = true;
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADPHONE,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
-}
-
-// Test UpdateAudioSystem() when connecting a headset.
-TEST_F(AudioDeviceHandlerTest, UpdateAudioSystemConnectHeadset) {
-  handler_.headphone_ = true;
-  handler_.microphone_ = true;
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_IN_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(AUDIO_DEVICE_OUT_WIRED_HEADSET,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  EXPECT_CALL(handler_, TriggerCallback(
-      AudioDeviceHandlerMock::DeviceConnectionState::kDevicesConnected));
-  handler_.UpdateAudioSystem(handler_.headphone_, handler_.microphone_);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_.size(), 2);
-}
-
-// Test ConnectAudioDevice() with an input device.
-TEST_F(AudioDeviceHandlerTest, ConnectAudioDeviceInput) {
-  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  handler_.ConnectAudioDevice(device);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_NE(
-      handler_.connected_input_devices_.find(device),
-      handler_.connected_input_devices_.end());
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-// Test ConnectAudioDevice() with an output device.
-TEST_F(AudioDeviceHandlerTest, ConnectAudioDeviceOutput) {
-  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADSET;
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
-  handler_.ConnectAudioDevice(device);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_NE(
-      handler_.connected_output_devices_.find(device),
-      handler_.connected_output_devices_.end());
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-// Test DisconnectAudioDevice() with an input device.
-TEST_F(AudioDeviceHandlerTest, DisconnectAudioDeviceInput) {
-  audio_devices_t device = AUDIO_DEVICE_IN_WIRED_HEADSET;
-  handler_.connected_input_devices_.insert(device);
-  handler_.connected_output_devices_.insert(device);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  handler_.DisconnectAudioDevice(device);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 0);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-// Test DisconnectAudioDevice() with an output device.
-TEST_F(AudioDeviceHandlerTest, DisconnectAudioDeviceOutput) {
-  audio_devices_t device = AUDIO_DEVICE_OUT_WIRED_HEADSET;
-  handler_.connected_input_devices_.insert(device);
-  handler_.connected_output_devices_.insert(device);
-  EXPECT_CALL(handler_,
-              NotifyAudioPolicyService(device,
-                                       AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-  handler_.DisconnectAudioDevice(device);
-  EXPECT_EQ(handler_.connected_input_devices_.size(), 1);
-  EXPECT_EQ(handler_.connected_output_devices_.size(), 0);
-  EXPECT_EQ(handler_.changed_devices_.size(), 1);
-  EXPECT_EQ(handler_.changed_devices_[0], device);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/audio_service_callback_test.cpp b/brillo/audio/audioservice/test/audio_service_callback_test.cpp
deleted file mode 100644 (file)
index 38ced10..0000000
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for the audio service callback object.
-
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-#include <hardware/audio.h>
-
-#include "audio_service_callback.h"
-
-namespace brillo {
-
-class AudioServiceCallbackTest : public testing::Test {
- public:
-  void SetUp() override {
-    connected_call_count_ = 0;
-    disconnected_call_count_ = 0;
-    callback_.OnAudioDeviceAdded = OnDeviceConnectedMock;
-    callback_.OnAudioDeviceRemoved = OnDeviceDisconnectedMock;
-    user_data_ = static_cast<void*>(this);
-  }
-
-  static void OnDeviceConnectedMock(const BAudioDeviceInfo*, void* user_data) {
-    static_cast<AudioServiceCallbackTest*>(user_data)->connected_call_count_++;
-  }
-
-  static void OnDeviceDisconnectedMock(const BAudioDeviceInfo*, void* user_data) {
-    static_cast<AudioServiceCallbackTest*>(
-        user_data)->disconnected_call_count_++;
-  }
-
-  BAudioCallback callback_;
-  void* user_data_;
-  int connected_call_count_;
-  int disconnected_call_count_;
-};
-
-TEST_F(AudioServiceCallbackTest, CallbackCallCount) {
-  std::vector<int> devices = {AUDIO_DEVICE_OUT_WIRED_HEADSET,
-    AUDIO_DEVICE_OUT_WIRED_HEADPHONE};
-  AudioServiceCallback service_callback(&callback_, user_data_);
-  service_callback.OnAudioDevicesConnected(devices);
-  EXPECT_EQ(connected_call_count_, devices.size());
-  service_callback.OnAudioDevicesDisconnected(devices);
-  EXPECT_EQ(disconnected_call_count_, devices.size());
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/audio_volume_handler_mock.h b/brillo/audio/audioservice/test/audio_volume_handler_mock.h
deleted file mode 100644 (file)
index 32028ca..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Mock of AudioVolumeHandler.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
-
-#include <gmock/gmock.h>
-#include <gtest/gtest_prod.h>
-
-#include "audio_volume_handler.h"
-
-namespace brillo {
-
-class AudioVolumeHandlerMock : public AudioVolumeHandler {
- public:
-  AudioVolumeHandlerMock() = default;
-  ~AudioVolumeHandlerMock() {}
-
- private:
-  friend class AudioVolumeHandlerTest;
-  FRIEND_TEST(AudioVolumeHandlerTest, FileGeneration);
-  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForKey);
-  FRIEND_TEST(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple);
-  FRIEND_TEST(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple);
-  FRIEND_TEST(AudioVolumeHandlerTest, InitNoFile);
-  FRIEND_TEST(AudioVolumeHandlerTest, InitFilePresent);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventEmpty);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyUp);
-  FRIEND_TEST(AudioVolumeHandlerTest, ProcessEventKeyDown);
-  FRIEND_TEST(AudioVolumeHandlerTest, SelectStream);
-  FRIEND_TEST(AudioVolumeHandlerTest, ComputeNewVolume);
-  FRIEND_TEST(AudioVolumeHandlerTest, GetSetVolumeIndex);
-
-  MOCK_METHOD3(TriggerCallback, void(audio_stream_type_t, int, int));
-  MOCK_METHOD0(InitAPSAllStreams, void());
-  MOCK_METHOD1(AdjustVolumeActiveStreams, void(int));
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_TEST_AUDIO_VOLUME_HANDLER_MOCK_H_
diff --git a/brillo/audio/audioservice/test/audio_volume_handler_test.cpp b/brillo/audio/audioservice/test/audio_volume_handler_test.cpp
deleted file mode 100644 (file)
index 47ef236..0000000
+++ /dev/null
@@ -1,212 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for audio volume handler.
-
-#include "audio_volume_handler_mock.h"
-
-#include <memory>
-#include <string>
-
-#include <base/files/file_path.h>
-#include <base/files/file_util.h>
-#include <base/files/scoped_temp_dir.h>
-#include <brillo/key_value_store.h>
-#include <brillo/strings/string_utils.h>
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-#include "audio_device_handler.h"
-
-using base::FilePath;
-using base::PathExists;
-using base::ScopedTempDir;
-using brillo::string_utils::ToString;
-using std::stoi;
-using testing::_;
-
-namespace brillo {
-
-class AudioVolumeHandlerTest : public testing::Test {
- public:
-  void SetUp() override {
-    EXPECT_TRUE(temp_dir_.CreateUniqueTempDir());
-    volume_file_path_ = temp_dir_.path().Append("vol_file");
-    handler_.SetVolumeFilePathForTesting(volume_file_path_);
-  }
-
-  void SetupHandlerVolumeFile() {
-    handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
-    handler_.GenerateVolumeFile();
-  }
-
-  AudioVolumeHandlerMock handler_;
-  FilePath volume_file_path_;
-
- private:
-  ScopedTempDir temp_dir_;
-};
-
-// Test that the volume file is formatted correctly.
-TEST_F(AudioVolumeHandlerTest, FileGeneration) {
-  SetupHandlerVolumeFile();
-  KeyValueStore kv_store;
-  kv_store.Load(volume_file_path_);
-  for (auto stream : handler_.kSupportedStreams_) {
-    std::string value;
-    ASSERT_EQ(handler_.kMinIndex_, 0);
-    ASSERT_EQ(handler_.kMaxIndex_, 100);
-    for (auto device : AudioDeviceHandler::kSupportedOutputDevices_) {
-      ASSERT_TRUE(kv_store.GetString(handler_.kCurrentIndexKey_ + "." +
-                                         ToString(stream) + "." +
-                                         ToString(device),
-                                     &value));
-      ASSERT_EQ(handler_.kDefaultCurrentIndex_, stoi(value));
-    }
-  }
-}
-
-// Test GetVolumeCurrentIndex.
-TEST_F(AudioVolumeHandlerTest, GetVolumeForStreamDeviceTuple) {
-  handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
-  handler_.kv_store_->SetString(handler_.kCurrentIndexKey_ + ".1.2", "100");
-  ASSERT_EQ(
-      handler_.GetVolumeCurrentIndex(static_cast<audio_stream_type_t>(1), 2),
-      100);
-}
-
-// Test SetVolumeCurrentIndex.
-TEST_F(AudioVolumeHandlerTest, SetVolumeForStreamDeviceTuple) {
-  handler_.kv_store_ = std::unique_ptr<KeyValueStore>(new KeyValueStore);
-  handler_.PersistVolumeConfiguration(
-      static_cast<audio_stream_type_t>(1), 2, 100);
-  std::string value;
-  auto key = handler_.kCurrentIndexKey_ + ".1.2";
-  handler_.kv_store_->GetString(key, &value);
-  ASSERT_EQ(stoi(value), 100);
-}
-
-// Test that a new volume file is generated if it doesn't exist.
-TEST_F(AudioVolumeHandlerTest, InitNoFile) {
-  EXPECT_CALL(handler_, InitAPSAllStreams());
-  handler_.Init(nullptr);
-  EXPECT_TRUE(PathExists(volume_file_path_));
-}
-
-// Test that a new volume file isn't generated it already exists.
-TEST_F(AudioVolumeHandlerTest, InitFilePresent) {
-  KeyValueStore kv_store;
-  kv_store.SetString("foo", "100");
-  kv_store.Save(volume_file_path_);
-  EXPECT_CALL(handler_, InitAPSAllStreams());
-  handler_.Init(nullptr);
-  EXPECT_TRUE(PathExists(volume_file_path_));
-  std::string value;
-  handler_.kv_store_->GetString("foo", &value);
-  EXPECT_EQ(stoi(value), 100);
-}
-
-TEST_F(AudioVolumeHandlerTest, ProcessEventEmpty) {
-  struct input_event event;
-  event.type = 0;
-  event.code = 0;
-  event.value = 0;
-  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(_)).Times(0);
-  handler_.ProcessEvent(event);
-}
-
-TEST_F(AudioVolumeHandlerTest, ProcessEventKeyUp) {
-  struct input_event event;
-  event.type = EV_KEY;
-  event.code = KEY_VOLUMEUP;
-  event.value = 1;
-  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(1));
-  handler_.ProcessEvent(event);
-}
-
-TEST_F(AudioVolumeHandlerTest, ProcessEventKeyDown) {
-  struct input_event event;
-  event.type = EV_KEY;
-  event.code = KEY_VOLUMEDOWN;
-  event.value = 1;
-  EXPECT_CALL(handler_, AdjustVolumeActiveStreams(-1));
-  handler_.ProcessEvent(event);
-}
-
-TEST_F(AudioVolumeHandlerTest, SelectStream) {
-  EXPECT_EQ(handler_.GetVolumeControlStream(), AUDIO_STREAM_DEFAULT);
-  handler_.SetVolumeControlStream(AUDIO_STREAM_MUSIC);
-  EXPECT_EQ(handler_.GetVolumeControlStream(), AUDIO_STREAM_MUSIC);
-}
-
-TEST_F(AudioVolumeHandlerTest, ComputeNewVolume) {
-  EXPECT_EQ(handler_.GetNewVolumeIndex(50, 1, AUDIO_STREAM_MUSIC), 51);
-  EXPECT_EQ(handler_.GetNewVolumeIndex(50, -1, AUDIO_STREAM_MUSIC), 49);
-  handler_.step_sizes_[AUDIO_STREAM_MUSIC] = 10;
-  EXPECT_EQ(handler_.GetNewVolumeIndex(50, 1, AUDIO_STREAM_MUSIC), 60);
-  EXPECT_EQ(handler_.GetNewVolumeIndex(50, -1, AUDIO_STREAM_MUSIC), 40);
-  SetupHandlerVolumeFile();
-  EXPECT_EQ(handler_.GetNewVolumeIndex(100, 1, AUDIO_STREAM_MUSIC), 100);
-  EXPECT_EQ(handler_.GetNewVolumeIndex(0, -1, AUDIO_STREAM_MUSIC), 0);
-}
-
-TEST_F(AudioVolumeHandlerTest, GetSetMaxSteps) {
-  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 0), EINVAL);
-  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 100), 0);
-  EXPECT_EQ(handler_.GetVolumeMaxSteps(AUDIO_STREAM_MUSIC), 100);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, -1), EINVAL);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 101), EINVAL);
-}
-
-TEST_F(AudioVolumeHandlerTest, GetSetVolumeIndex) {
-  SetupHandlerVolumeFile();
-  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, _, 0));
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 0),
-            0);
-  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 0, 50));
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 50),
-            0);
-  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 50, 100));
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 100),
-            0);
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, -1),
-            EINVAL);
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 101),
-            EINVAL);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 10), 0);
-  EXPECT_EQ(handler_.GetVolumeIndex(AUDIO_STREAM_MUSIC,
-                                    AUDIO_DEVICE_OUT_WIRED_HEADSET),
-            10);
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 11),
-            EINVAL);
-  EXPECT_CALL(handler_, TriggerCallback(AUDIO_STREAM_MUSIC, 100, 50));
-  EXPECT_EQ(handler_.SetVolumeIndex(
-                AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADSET, 5),
-            0);
-  EXPECT_EQ(handler_.SetVolumeMaxSteps(AUDIO_STREAM_MUSIC, 20), 0);
-  EXPECT_EQ(handler_.GetVolumeIndex(AUDIO_STREAM_MUSIC,
-                                    AUDIO_DEVICE_OUT_WIRED_HEADSET),
-            10);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/brillo_audio_client_mock.h b/brillo/audio/audioservice/test/brillo_audio_client_mock.h
deleted file mode 100644 (file)
index 047c7c3..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Mock for the brillo audio client.
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
-
-#include <gmock/gmock.h>
-#include <gtest/gtest_prod.h>
-
-#include "brillo_audio_client.h"
-
-namespace brillo {
-
-class BrilloAudioClientMock : public BrilloAudioClient {
- public:
-  virtual ~BrilloAudioClientMock() = default;
-
-  MOCK_METHOD0(OnBASDisconnect, void());
-
- private:
-  friend class BrilloAudioClientTest;
-  FRIEND_TEST(BrilloAudioClientTest,
-              CheckInitializeRegistersForDeathNotifications);
-  FRIEND_TEST(BrilloAudioClientTest, InitializeNoService);
-
-  BrilloAudioClientMock() = default;
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_CLIENT_MOCK_H_
diff --git a/brillo/audio/audioservice/test/brillo_audio_client_test.cpp b/brillo/audio/audioservice/test/brillo_audio_client_test.cpp
deleted file mode 100644 (file)
index 3616c7b..0000000
+++ /dev/null
@@ -1,287 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for the brillo audio client.
-
-#include <binderwrapper/binder_test_base.h>
-#include <binderwrapper/stub_binder_wrapper.h>
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-#include "audio_service_callback.h"
-#include "brillo_audio_client.h"
-#include "include/brillo_audio_manager.h"
-#include "test/brillo_audio_client_mock.h"
-#include "test/brillo_audio_service_mock.h"
-
-using android::sp;
-using android::String8;
-using testing::Return;
-using testing::_;
-
-namespace brillo {
-
-static const char kBrilloAudioServiceName[] =
-    "android.brillo.brilloaudioservice.BrilloAudioService";
-
-class BrilloAudioClientTest : public android::BinderTestBase {
- public:
-  bool ConnectClientToBAS() {
-    bas_ = new BrilloAudioServiceMock();
-    binder_wrapper()->SetBinderForService(kBrilloAudioServiceName, bas_);
-    return client_.Initialize();
-  }
-
-  BrilloAudioClientMock client_;
-  sp<BrilloAudioServiceMock> bas_;
-};
-
-TEST_F(BrilloAudioClientTest, SetDeviceNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(
-      client_.SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE),
-      ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, GetDevicesNoService) {
-  std::vector<int> foo;
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.GetDevices(0, foo), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, RegisterCallbackNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.RegisterAudioCallback(nullptr, nullptr), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, UnregisterAudioCallbackNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.UnregisterAudioCallback(0), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, InitializeNoService) {
-  EXPECT_FALSE(client_.Initialize());
-}
-
-TEST_F(BrilloAudioClientTest, CheckInitializeRegistersForDeathNotifications) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(client_, OnBASDisconnect());
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-}
-
-TEST_F(BrilloAudioClientTest, GetDevicesWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  std::vector<int> foo;
-  EXPECT_CALL(*bas_.get(), GetDevices(0, &foo)).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.GetDevices(0, foo), 0);
-}
-
-TEST_F(BrilloAudioClientTest, SetDeviceWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  std::vector<int> foo;
-  EXPECT_CALL(*bas_.get(),
-              SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(
-      client_.SetDevice(AUDIO_POLICY_FORCE_USE_MAX, AUDIO_POLICY_FORCE_NONE),
-      0);
-}
-
-TEST_F(BrilloAudioClientTest, RegisterCallbackWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  BAudioCallback bcallback;
-  AudioServiceCallback* callback =
-      new AudioServiceCallback(&bcallback, nullptr);
-  int id = 0;
-  EXPECT_CALL(*bas_.get(),
-              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
-  EXPECT_NE(id, 0);
-}
-
-TEST_F(BrilloAudioClientTest, RegisterSameCallbackTwiceWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  BAudioCallback bcallback;
-  AudioServiceCallback* callback =
-      new AudioServiceCallback(&bcallback, nullptr);
-  int id = -1;
-  EXPECT_CALL(*bas_.get(),
-              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
-      .Times(2)
-      .WillRepeatedly(Return(Status::ok()));
-  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
-  EXPECT_NE(id, 0);
-  id = -1;
-  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), EINVAL);
-  EXPECT_EQ(id, 0);
-}
-
-TEST_F(BrilloAudioClientTest, UnregisterAudioCallbackValidWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  BAudioCallback bcallback;
-  AudioServiceCallback* callback =
-      new AudioServiceCallback(&bcallback, nullptr);
-  int id = 0;
-  EXPECT_CALL(*bas_.get(),
-              RegisterServiceCallback(sp<IAudioServiceCallback>(callback)))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.RegisterAudioCallback(callback, &id), 0);
-  EXPECT_NE(id, 0);
-  EXPECT_CALL(*bas_.get(),
-              UnregisterServiceCallback(sp<IAudioServiceCallback>(callback)))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.UnregisterAudioCallback(id), 0);
-}
-
-TEST_F(BrilloAudioClientTest, UnregisterInvalidCallbackWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_EQ(client_.UnregisterAudioCallback(1), EINVAL);
-}
-
-TEST_F(BrilloAudioClientTest, RegisterAndUnregisterAudioTwoCallbacks) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  BAudioCallback bcallback1, bcallback2;
-  AudioServiceCallback* callback1 =
-      new AudioServiceCallback(&bcallback1, nullptr);
-  AudioServiceCallback* callback2 =
-      new AudioServiceCallback(&bcallback2, nullptr);
-  int id1 = 0, id2 = 0;
-  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
-      .WillRepeatedly(Return(Status::ok()));
-  EXPECT_EQ(client_.RegisterAudioCallback(callback1, &id1), 0);
-  EXPECT_NE(id1, 0);
-  EXPECT_EQ(client_.RegisterAudioCallback(callback2, &id2), 0);
-  EXPECT_NE(id2, 0);
-  EXPECT_CALL(*bas_.get(), UnregisterServiceCallback(_))
-      .WillRepeatedly(Return(Status::ok()));
-  EXPECT_EQ(client_.UnregisterAudioCallback(id1), 0);
-  EXPECT_EQ(client_.UnregisterAudioCallback(id2), 0);
-}
-
-TEST_F(BrilloAudioClientTest, GetMaxVolStepsNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  int foo;
-  EXPECT_EQ(client_.GetMaxVolumeSteps(BAudioUsage::kUsageInvalid, &foo),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, GetMaxVolStepsWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  int foo;
-  EXPECT_CALL(*bas_.get(), GetMaxVolumeSteps(AUDIO_STREAM_MUSIC, &foo))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.GetMaxVolumeSteps(BAudioUsage::kUsageMedia, &foo), 0);
-}
-
-TEST_F(BrilloAudioClientTest, SetMaxVolStepsNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.SetMaxVolumeSteps(BAudioUsage::kUsageInvalid, 100),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, SetMaxVolStepsWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(), SetMaxVolumeSteps(AUDIO_STREAM_MUSIC, 100))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.SetMaxVolumeSteps(BAudioUsage::kUsageMedia, 100), 0);
-}
-
-TEST_F(BrilloAudioClientTest, SetVolIndexNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.SetVolumeIndex(
-                BAudioUsage::kUsageInvalid, AUDIO_DEVICE_NONE, 100),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, SetVolIndexWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(),
-              SetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_SPEAKER, 100))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.SetVolumeIndex(
-                BAudioUsage::kUsageMedia, AUDIO_DEVICE_OUT_SPEAKER, 100),
-            0);
-}
-
-TEST_F(BrilloAudioClientTest, GetVolIndexNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  int foo;
-  EXPECT_EQ(client_.GetVolumeIndex(
-                BAudioUsage::kUsageInvalid, AUDIO_DEVICE_NONE, &foo),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, GetVolIndexWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  int foo;
-  EXPECT_CALL(
-      *bas_.get(),
-      GetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_SPEAKER, &foo))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.GetVolumeIndex(
-                BAudioUsage::kUsageMedia, AUDIO_DEVICE_OUT_SPEAKER, &foo),
-            0);
-}
-
-TEST_F(BrilloAudioClientTest, GetVolumeControlStreamNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  BAudioUsage foo;
-  EXPECT_EQ(client_.GetVolumeControlStream(&foo), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, GetVolumeControlStreamWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(), GetVolumeControlStream(_))
-      .WillOnce(Return(Status::ok()));
-  BAudioUsage foo;
-  EXPECT_EQ(client_.GetVolumeControlStream(&foo), 0);
-}
-
-TEST_F(BrilloAudioClientTest, SetVolumeControlStreamNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.SetVolumeControlStream(kUsageMedia), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, SetVolumeControlStreamWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(), SetVolumeControlStream(AUDIO_STREAM_MUSIC))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.SetVolumeControlStream(kUsageMedia), 0);
-}
-
-TEST_F(BrilloAudioClientTest, IncrementVolNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.IncrementVolume(), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, IncrementVolWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(), IncrementVolume()).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.IncrementVolume(), 0);
-}
-
-TEST_F(BrilloAudioClientTest, DecrementVolNoService) {
-  EXPECT_CALL(client_, OnBASDisconnect());
-  EXPECT_EQ(client_.DecrementVolume(), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioClientTest, DecrementVolWithBAS) {
-  EXPECT_TRUE(ConnectClientToBAS());
-  EXPECT_CALL(*bas_.get(), DecrementVolume()).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(client_.DecrementVolume(), 0);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp b/brillo/audio/audioservice/test/brillo_audio_device_info_internal_test.cpp
deleted file mode 100644 (file)
index d02608c..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for the BrilloAudioDeviceInfoInternal test.
-
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-#include <hardware/audio.h>
-
-#include "brillo_audio_device_info_internal.h"
-
-namespace brillo {
-
-TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadset) {
-  BAudioDeviceInfoInternal* badi =
-      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
-          AUDIO_DEVICE_OUT_WIRED_HEADSET);
-  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADSET);
-  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
-}
-
-TEST(BrilloAudioDeviceInfoInternalTest, OutWiredHeadphone) {
-  BAudioDeviceInfoInternal* badi =
-      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
-          AUDIO_DEVICE_OUT_WIRED_HEADPHONE);
-  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADPHONES);
-  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
-}
-
-TEST(BrilloAudioDeviceInfoInternalTest, InWiredHeadset) {
-  BAudioDeviceInfoInternal* badi =
-      BAudioDeviceInfoInternal::CreateFromAudioDevicesT(
-          AUDIO_DEVICE_IN_WIRED_HEADSET);
-  EXPECT_EQ(badi->device_id_, TYPE_WIRED_HEADSET_MIC);
-  EXPECT_EQ(badi->GetConfig(), AUDIO_POLICY_FORCE_HEADPHONES);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/brillo_audio_manager_test.cpp b/brillo/audio/audioservice/test/brillo_audio_manager_test.cpp
deleted file mode 100644 (file)
index b4299f7..0000000
+++ /dev/null
@@ -1,497 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-// Tests for the brillo audio manager interface.
-
-#include <binderwrapper/binder_test_base.h>
-#include <binderwrapper/stub_binder_wrapper.h>
-#include <gmock/gmock.h>
-#include <gtest/gtest.h>
-
-#include "audio_service_callback.h"
-#include "brillo_audio_client.h"
-#include "include/brillo_audio_manager.h"
-#include "test/brillo_audio_service_mock.h"
-
-using android::sp;
-using testing::Mock;
-using testing::Return;
-using testing::_;
-
-namespace brillo {
-
-static const char kBrilloAudioServiceName[] =
-    "android.brillo.brilloaudioservice.BrilloAudioService";
-
-class BrilloAudioManagerTest : public android::BinderTestBase {
- public:
-  void ConnectBAS() {
-    bas_ = new BrilloAudioServiceMock();
-    binder_wrapper()->SetBinderForService(kBrilloAudioServiceName, bas_);
-  }
-
-  BAudioManager* GetValidManager() {
-    ConnectBAS();
-    auto bam = BAudioManager_new();
-    EXPECT_NE(bam, nullptr);
-    return bam;
-  }
-
-  void TearDown() {
-    // Stopping the BAS will cause the client to delete itself.
-    binder_wrapper()->NotifyAboutBinderDeath(bas_);
-    bas_.clear();
-  }
-
-  sp<BrilloAudioServiceMock> bas_;
-};
-
-TEST_F(BrilloAudioManagerTest, NewNoService) {
-  EXPECT_EQ(BAudioManager_new(), nullptr);
-}
-
-TEST_F(BrilloAudioManagerTest, NewWithBAS) {
-  ConnectBAS();
-  auto bam = BAudioManager_new();
-  EXPECT_NE(bam, nullptr);
-}
-
-TEST_F(BrilloAudioManagerTest, GetDevicesInvalidParams) {
-  auto bam = GetValidManager();
-  unsigned int num_devices;
-  EXPECT_EQ(BAudioManager_getDevices(nullptr, 1, nullptr, 0, &num_devices),
-            EINVAL);
-  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, nullptr), EINVAL);
-  EXPECT_EQ(BAudioManager_getDevices(bam, -1, nullptr, 0, &num_devices),
-            EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, GetDevicesNullArrNoDevices) {
-  auto bam = GetValidManager();
-  unsigned int num_devices = -1;
-  EXPECT_CALL(*bas_.get(), GetDevices(1, _)).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, &num_devices), 0);
-  EXPECT_EQ(num_devices, 0);
-}
-
-TEST_F(BrilloAudioManagerTest, SetInputDeviceInvalidParams) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_UNKNOWN);
-  EXPECT_EQ(BAudioManager_setInputDevice(nullptr, nullptr), EINVAL);
-  EXPECT_EQ(BAudioManager_setInputDevice(bam, nullptr), EINVAL);
-  EXPECT_EQ(BAudioManager_setInputDevice(nullptr, device), EINVAL);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetInputDeviceHeadsetMic) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET_MIC);
-  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD,
-                                     AUDIO_POLICY_FORCE_HEADPHONES))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetInputDeviceBuiltinMic) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_BUILTIN_MIC);
-  EXPECT_CALL(*bas_.get(),
-              SetDevice(AUDIO_POLICY_FORCE_FOR_RECORD, AUDIO_POLICY_FORCE_NONE))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceInvalidParams) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_UNKNOWN);
-  EXPECT_EQ(BAudioManager_setOutputDevice(nullptr, nullptr, kUsageMedia),
-            EINVAL);
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, nullptr, kUsageMedia), EINVAL);
-  EXPECT_EQ(BAudioManager_setOutputDevice(nullptr, device, kUsageMedia),
-            EINVAL);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadset) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET);
-  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_MEDIA,
-                                     AUDIO_POLICY_FORCE_HEADPHONES))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageMedia), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceBuiltinSpeaker) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_BUILTIN_SPEAKER);
-  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
-                                     AUDIO_POLICY_FORCE_SPEAKER))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageSystem), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadphoneNotification) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
-                                     AUDIO_POLICY_FORCE_HEADPHONES))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageNotifications), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceWiredHeadphoneAlarm) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  EXPECT_CALL(*bas_.get(), SetDevice(AUDIO_POLICY_FORCE_FOR_SYSTEM,
-                                     AUDIO_POLICY_FORCE_HEADPHONES))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageAlarm), 0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, RegisterCallbackInvalidParams) {
-  auto bam = GetValidManager();
-  BAudioCallback callback;
-  int callback_id;
-  EXPECT_EQ(
-      BAudioManager_registerAudioCallback(nullptr, nullptr, nullptr, nullptr),
-      EINVAL);
-  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, nullptr, nullptr, nullptr),
-            EINVAL);
-  EXPECT_EQ(
-      BAudioManager_registerAudioCallback(bam, &callback, nullptr, nullptr),
-      EINVAL);
-  EXPECT_EQ(
-      BAudioManager_registerAudioCallback(bam, nullptr, nullptr, &callback_id),
-      EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, RegisterCallbackOnStack) {
-  auto bam = GetValidManager();
-  BAudioCallback callback;
-  callback.OnAudioDeviceAdded = nullptr;
-  callback.OnAudioDeviceRemoved = nullptr;
-  int callback_id = 0;
-  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
-                                                &callback_id),
-            0);
-  EXPECT_NE(callback_id, 0);
-}
-
-TEST_F(BrilloAudioManagerTest, RegisterCallbackOnHeap) {
-  auto bam = GetValidManager();
-  BAudioCallback* callback = new BAudioCallback;
-  callback->OnAudioDeviceAdded = nullptr;
-  callback->OnAudioDeviceRemoved = nullptr;
-  int callback_id = 0;
-  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(
-      BAudioManager_registerAudioCallback(bam, callback, nullptr, &callback_id),
-      0);
-  EXPECT_NE(callback_id, 0);
-  delete callback;
-}
-
-TEST_F(BrilloAudioManagerTest, UnregisterCallbackInvalidParams) {
-  auto bam = GetValidManager();
-  EXPECT_EQ(BAudioManager_unregisterAudioCallback(nullptr, 1), EINVAL);
-  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, 1), EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, UnregisterCallback) {
-  auto bam = GetValidManager();
-  BAudioCallback callback;
-  callback.OnAudioDeviceAdded = nullptr;
-  callback.OnAudioDeviceRemoved = nullptr;
-  int callback_id = 0;
-  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
-                                                &callback_id),
-            0);
-  EXPECT_NE(callback_id, 0);
-  EXPECT_CALL(*bas_.get(), UnregisterServiceCallback(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id), 0);
-  // 2nd call shouldn't result in a call to BAS.
-  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id), EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, GetDevicesBASDies) {
-  auto bam = GetValidManager();
-  unsigned int num_devices = -1;
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_getDevices(bam, 1, nullptr, 0, &num_devices),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, SetInputDeviceBASDies) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADSET_MIC);
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_setInputDevice(bam, device), ECONNABORTED);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetOutputDeviceBASDies) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_setOutputDevice(bam, device, kUsageNotifications),
-            ECONNABORTED);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, RegisterServiceCallbackBASDies) {
-  auto bam = GetValidManager();
-  BAudioCallback callback;
-  callback.OnAudioDeviceAdded = nullptr;
-  callback.OnAudioDeviceRemoved = nullptr;
-  int callback_id = 1;
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
-                                                &callback_id),
-            ECONNABORTED);
-  EXPECT_EQ(callback_id, 0);
-}
-
-TEST_F(BrilloAudioManagerTest, UnregisterCallbackBASDies) {
-  auto bam = GetValidManager();
-  BAudioCallback callback;
-  callback.OnAudioDeviceAdded = nullptr;
-  callback.OnAudioDeviceRemoved = nullptr;
-  int callback_id = 0;
-  EXPECT_CALL(*bas_.get(), RegisterServiceCallback(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_registerAudioCallback(bam, &callback, nullptr,
-                                                &callback_id),
-            0);
-  EXPECT_NE(callback_id, 0);
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_unregisterAudioCallback(bam, callback_id),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, GetMaxVolumeStepsInvalidParams) {
-  auto bam = GetValidManager();
-  int foo;
-  EXPECT_EQ(BAudioManager_getMaxVolumeSteps(
-                nullptr, BAudioUsage::kUsageMedia, nullptr),
-            EINVAL);
-  EXPECT_EQ(
-      BAudioManager_getMaxVolumeSteps(nullptr, BAudioUsage::kUsageMedia, &foo),
-      EINVAL);
-  EXPECT_EQ(
-      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, nullptr),
-      EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, GetMaxVolStepsWithBAS) {
-  auto bam = GetValidManager();
-  int foo;
-  EXPECT_CALL(*bas_.get(), GetMaxVolumeSteps(AUDIO_STREAM_MUSIC, &foo))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(
-      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, &foo), 0);
-}
-
-TEST_F(BrilloAudioManagerTest, GetMaxVolStepsBASDies) {
-  auto bam = GetValidManager();
-  int foo;
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(
-      BAudioManager_getMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, &foo),
-      ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, SetMaxVolumeStepsInvalidParams) {
-  EXPECT_EQ(
-      BAudioManager_setMaxVolumeSteps(nullptr, BAudioUsage::kUsageMedia, 100),
-      EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, SetMaxVolStepsWithBAS) {
-  auto bam = GetValidManager();
-  EXPECT_CALL(*bas_.get(), SetMaxVolumeSteps(AUDIO_STREAM_MUSIC, 100))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, 100),
-            0);
-}
-
-TEST_F(BrilloAudioManagerTest, SetMaxVolStepsBASDies) {
-  auto bam = GetValidManager();
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_setMaxVolumeSteps(bam, BAudioUsage::kUsageMedia, 100),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolIndexInvalidParams) {
-  auto bam = GetValidManager();
-  EXPECT_EQ(BAudioManager_setVolumeIndex(
-                nullptr, BAudioUsage::kUsageMedia, nullptr, 100),
-            EINVAL);
-  EXPECT_EQ(
-      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, nullptr, 100),
-      EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolIndexWithBAS) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  EXPECT_CALL(
-      *bas_.get(),
-      SetVolumeIndex(AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADPHONE, 100))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(
-      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, device, 100),
-      0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolIndexBASDies) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(
-      BAudioManager_setVolumeIndex(bam, BAudioUsage::kUsageMedia, device, 100),
-      ECONNABORTED);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolIndexInvalidParams) {
-  auto bam = GetValidManager();
-  int foo;
-  EXPECT_EQ(BAudioManager_getVolumeIndex(
-                nullptr, BAudioUsage::kUsageMedia, nullptr, nullptr),
-            EINVAL);
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  EXPECT_EQ(BAudioManager_getVolumeIndex(
-                bam, BAudioUsage::kUsageMedia, device, nullptr),
-            EINVAL);
-  EXPECT_EQ(BAudioManager_getVolumeIndex(
-                nullptr, BAudioUsage::kUsageMedia, device, &foo),
-            EINVAL);
-  EXPECT_EQ(BAudioManager_getVolumeIndex(
-                bam, BAudioUsage::kUsageMedia, nullptr, &foo),
-            EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolIndexWithBAS) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  int foo;
-  EXPECT_CALL(*bas_.get(),
-              GetVolumeIndex(
-                  AUDIO_STREAM_MUSIC, AUDIO_DEVICE_OUT_WIRED_HEADPHONE, &foo))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(
-      BAudioManager_getVolumeIndex(bam, BAudioUsage::kUsageMedia, device, &foo),
-      0);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolIndexBASDies) {
-  auto bam = GetValidManager();
-  auto device = BAudioDeviceInfo_new(TYPE_WIRED_HEADPHONES);
-  int foo;
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(
-      BAudioManager_getVolumeIndex(bam, BAudioUsage::kUsageMedia, device, &foo),
-      ECONNABORTED);
-  BAudioDeviceInfo_delete(device);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolumeControlUsageInvalidParams) {
-  auto bam = GetValidManager();
-  BAudioUsage foo;
-  EXPECT_EQ(BAudioManager_getVolumeControlUsage(nullptr, nullptr), EINVAL);
-  EXPECT_EQ(BAudioManager_getVolumeControlUsage(nullptr, &foo), EINVAL);
-  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, nullptr), EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolumeControlStreamWithBAS) {
-  auto bam = GetValidManager();
-  BAudioUsage foo;
-  EXPECT_CALL(*bas_.get(), GetVolumeControlStream(_))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, &foo), 0);
-}
-
-TEST_F(BrilloAudioManagerTest, GetVolumeControlStreamBASDies) {
-  auto bam = GetValidManager();
-  BAudioUsage foo;
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_getVolumeControlUsage(bam, &foo), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolumeControlUsageInvalidParams) {
-  EXPECT_EQ(
-      BAudioManager_setVolumeControlUsage(nullptr, BAudioUsage::kUsageMedia),
-      EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolumeControlStreamWithBAS) {
-  auto bam = GetValidManager();
-  EXPECT_CALL(*bas_.get(), SetVolumeControlStream(AUDIO_STREAM_MUSIC))
-      .WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_setVolumeControlUsage(bam, BAudioUsage::kUsageMedia),
-            0);
-}
-
-TEST_F(BrilloAudioManagerTest, SetVolumeControlStreamBASDies) {
-  auto bam = GetValidManager();
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_setVolumeControlUsage(bam, BAudioUsage::kUsageMedia),
-            ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, DecIncInvalidParams) {
-  EXPECT_EQ(BAudioManager_decrementVolume(nullptr), EINVAL);
-  EXPECT_EQ(BAudioManager_incrementVolume(nullptr), EINVAL);
-}
-
-TEST_F(BrilloAudioManagerTest, IncVolWithBAS) {
-  auto bam = GetValidManager();
-  EXPECT_CALL(*bas_.get(), IncrementVolume()).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_incrementVolume(bam), 0);
-}
-
-TEST_F(BrilloAudioManagerTest, IncVolBASDies) {
-  auto bam = GetValidManager();
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_incrementVolume(bam), ECONNABORTED);
-}
-
-TEST_F(BrilloAudioManagerTest, DecVolWithBAS) {
-  auto bam = GetValidManager();
-  EXPECT_CALL(*bas_.get(), DecrementVolume()).WillOnce(Return(Status::ok()));
-  EXPECT_EQ(BAudioManager_decrementVolume(bam), 0);
-}
-
-TEST_F(BrilloAudioManagerTest, DecVolBASDies) {
-  auto bam = GetValidManager();
-  binder_wrapper()->NotifyAboutBinderDeath(bas_);
-  EXPECT_EQ(BAudioManager_decrementVolume(bam), ECONNABORTED);
-}
-
-}  // namespace brillo
diff --git a/brillo/audio/audioservice/test/brillo_audio_service_mock.h b/brillo/audio/audioservice/test/brillo_audio_service_mock.h
deleted file mode 100644 (file)
index 4b52ef1..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2016 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-#ifndef BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
-#define BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
-
-#include <vector>
-
-#include <gmock/gmock.h>
-#include <gtest/gtest_prod.h>
-
-#include "brillo_audio_service.h"
-
-namespace brillo {
-
-class BrilloAudioServiceMock : public BrilloAudioService {
- public:
-  BrilloAudioServiceMock() = default;
-  ~BrilloAudioServiceMock() {}
-
-  MOCK_METHOD2(GetDevices, Status(int flag, std::vector<int>* _aidl_return));
-  MOCK_METHOD2(SetDevice, Status(int usage, int config));
-  MOCK_METHOD2(GetMaxVolumeSteps, Status(int stream, int* _aidl_return));
-  MOCK_METHOD2(SetMaxVolumeSteps, Status(int stream, int max_steps));
-  MOCK_METHOD3(SetVolumeIndex, Status(int stream, int device, int index));
-  MOCK_METHOD3(GetVolumeIndex,
-               Status(int stream, int device, int* _aidl_return));
-  MOCK_METHOD1(GetVolumeControlStream, Status(int* _aidl_return));
-  MOCK_METHOD1(SetVolumeControlStream, Status(int stream));
-  MOCK_METHOD0(IncrementVolume, Status());
-  MOCK_METHOD0(DecrementVolume, Status());
-  MOCK_METHOD1(RegisterServiceCallback,
-               Status(const android::sp<IAudioServiceCallback>& callback));
-  MOCK_METHOD1(UnregisterServiceCallback,
-               Status(const android::sp<IAudioServiceCallback>& callback));
-
-  void RegisterHandlers(std::weak_ptr<AudioDeviceHandler>,
-                        std::weak_ptr<AudioVolumeHandler>){};
-  void OnDevicesConnected(const std::vector<int>&) {}
-  void OnDevicesDisconnected(const std::vector<int>&) {}
-  void OnVolumeChanged(audio_stream_type_t, int, int){};
-};
-
-}  // namespace brillo
-
-#endif  // BRILLO_AUDIO_AUDIOSERVICE_BRILLO_AUDIO_SERVICE_MOCK_H_
index 4b234be..cee3115 100644 (file)
@@ -11,6 +11,7 @@ cc_library_shared {
     include_dirs: ["system/media/private/camera/include"],
     local_include_dirs: ["include"],
     export_include_dirs: ["include"],
+    export_shared_lib_headers: ["libcutils"],
 
     shared_libs: [
         "libcutils",
index b61fcc4..30d3940 100644 (file)
@@ -34,6 +34,9 @@
     % if value.notes:
 ${value.notes | javadoc(metadata)}\
     % endif
+    % if value.sdk_notes:
+${value.sdk_notes | javadoc(metadata)}\
+    % endif
      * @see ${target_class}#${entry.name | jkey_identifier}
     % if entry.applied_visibility in ('hidden', 'ndk_public') or value.hidden:
      * @hide
@@ -42,6 +45,9 @@ ${value.notes | javadoc(metadata)}\
      * @deprecated Please refer to this API documentation to find the alternatives
     % endif
      */
+    % if value.deprecated:
+    @Deprecated
+    % endif
     public static final int ${jenum_value(entry, value)} = ${enum_calculate_value_string(value)};
 
   % endfor
index f9ce15e..6006c8d 100644 (file)
@@ -52,6 +52,7 @@ ${concatenated_info | javadoc(metadata)}\
   % endif
   % if entry.deprecated:
      * @deprecated
+${entry.deprecation_description | javadoc(metadata)}
   % endif
   % if entry.applied_visibility in ('hidden', 'ndk_public'):
      * @hide
diff --git a/camera/docs/HidlMetadata.mako b/camera/docs/HidlMetadata.mako
new file mode 100644 (file)
index 0000000..dcbbde7
--- /dev/null
@@ -0,0 +1,162 @@
+## -*- coding: utf-8 -*-
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+<%!
+  def annotated_type(entry):
+    if entry.enum:
+       type = 'enum'
+    else:
+       type = entry.type
+    if entry.container == 'array':
+       type += '[]'
+
+    return type
+%>\
+
+/*
+ * Autogenerated from camera metadata definitions in
+ * /system/media/camera/docs/metadata_definitions.xml
+ * *** DO NOT EDIT BY HAND ***
+ */
+
+package android.hardware.camera.metadata@${hal_major_version()}.${hal_minor_version()};
+
+% if first_hal_minor_version(hal_major_version()) != hal_minor_version():
+/* Include definitions from all prior minor HAL metadata revisions */
+  % for i in range(first_hal_minor_version(hal_major_version()),hal_minor_version()):
+import android.hardware.camera.metadata@${hal_major_version()}.${i};
+  % endfor
+
+% endif
+<%    gotSections = False %>\
+<%    gotFirstNewSection = False %>\
+% for idx, section in enumerate(find_all_sections_added_in_hal(metadata, hal_major_version(), hal_minor_version())):
+  % if idx == 0:
+<%    gotSections = True %>\
+/**
+ * Top level hierarchy definitions for camera metadata. *_INFO sections are for
+ * the static metadata that can be retrived without opening the camera device.
+ */
+enum CameraMetadataSection : ${'uint32_t' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '@%d.%d::CameraMetadataSection' % (hal_major_version(), hal_minor_version()-1)} {
+  % endif
+  % if first_hal_minor_version(hal_major_version()) != hal_minor_version() and not gotFirstNewSection:
+    ${path_name(section) | csym} =
+        android.hardware.camera.metadata@${hal_major_version()}.${hal_minor_version()-1}::CameraMetadataSection:ANDROID_SECTION_COUNT,
+<% gotFirstNewSection = True %>\
+  % else:
+    ${path_name(section) | csym},
+  % endif
+
+% endfor
+% if gotSections:
+    ANDROID_SECTION_COUNT${'' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '_%d_%d' % (hal_major_version(),hal_minor_version())},
+
+    VENDOR_SECTION${'' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '_%d_%d' % (hal_major_version(),hal_minor_version())} = 0x8000,
+
+};
+
+/**
+ * Hierarchy positions in enum space. All vendor extension sections must be
+ * defined with tag >= VENDOR_SECTION_START
+ */
+enum CameraMetadataSectionStart : ${'uint32_t' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else 'android.hardware.camera.metadata@%d.%d::CameraMetadataSectionStart' % (hal_major_version(), hal_minor_version()-1)} {
+  % for i in find_all_sections_added_in_hal(metadata, hal_major_version(), hal_minor_version()):
+    ${path_name(i) + '.start' | csym} = CameraMetadataSection:${path_name(i) | csym} << 16,
+
+  % endfor
+  % if first_hal_minor_version(hal_major_version()) != hal_minor_version() :
+    VENDOR_SECTION_START${'_%d_%d' % (hal_major_version(),hal_minor_version())} = CameraMetadataSection:VENDOR_SECTION${'_%d_%d' % (hal_major_version(),hal_minor_version())} << 16,
+  % else:
+    VENDOR_SECTION_START = CameraMetadataSection:VENDOR_SECTION << 16,
+  % endif
+
+};
+
+% else:
+// No new metadata sections added in this revision
+
+% endif
+/**
+ * Main enumeration for defining camera metadata tags added in this revision
+ *
+ * <p>Partial documentation is included for each tag; for complete documentation, reference
+ * '/system/media/camera/docs/docs.html' in the corresponding Android source tree.</p>
+ */
+enum CameraMetadataTag : ${'uint32_t' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '@%d.%d::CameraMetadataTag' % (hal_major_version(), hal_minor_version()-1)} {
+    % for sec in find_all_sections(metadata):
+<%    gotEntries = False %>\
+      % for idx,entry in enumerate(filter_added_in_hal_version(remove_synthetic(find_unique_entries(sec)), hal_major_version(), hal_minor_version())):
+<%      gotEntries = True %>\
+    /** ${entry.name} [${entry.kind}, ${annotated_type(entry)}, ${entry.applied_visibility}]
+        % if entry.description:
+     *
+${entry.description | hidldoc(metadata)}\
+        % endif
+     */
+        % if idx == 0:
+          % if find_first_older_used_hal_version(sec, hal_major_version(), hal_minor_version()) == (0, 0):
+    ${entry.name + " =" | csym} CameraMetadataSectionStart:${path_name(find_parent_section(entry)) | csym}_START,
+          % else:
+<%      prevVersion = find_first_older_used_hal_version(sec, hal_major_version(), hal_minor_version()) %>\
+    ${entry.name + " =" | csym} ${'android.hardware.camera.metadata@%d.%d' % prevVersion}::CameraMetadataTag:${path_name(find_parent_section(entry)) | csym}${'_END' if find_first_older_used_hal_version(sec, prevVersion[0], prevVersion[1]) == (0,0) else '_END_%d_%d' % prevVersion},
+          % endif
+        % else:
+    ${entry.name + "," | csym}
+        % endif
+
+      % endfor
+      % if gotEntries:
+    ${path_name(sec) | csym}${'_END' if first_hal_minor_version(hal_major_version()) == hal_minor_version() else '_END_%d_%d' % (hal_major_version(),hal_minor_version())},
+
+      % endif
+    %endfor
+};
+
+/*
+ * Enumeration definitions for the various entries that need them
+ */
+% for sec in find_all_sections(metadata):
+  % for entry in filter_has_enum_values_added_in_hal_version(remove_synthetic(find_unique_entries(sec)), hal_major_version(), hal_minor_version()):
+    % if entry.enum:
+
+<%    isFirstValue = True %>\
+<%    prevValue = None %>\
+      % for val in entry.enum.values:
+        % if val.hal_major_version == hal_major_version() and val.hal_minor_version == hal_minor_version():
+          % if isFirstValue:
+              % if prevValue is None:
+/** ${entry.name} enumeration values
+              % else:
+/** ${entry.name} enumeration values added since v${prevValue.hal_major_version}.${prevValue.hal_minor_version}
+              % endif
+ * @see ${entry.name | csym}
+ */
+enum CameraMetadataEnum${entry.name | pascal_case} :${' uint32_t' if prevValue is None else '\n        @%d.%d::CameraMetadataEnum%s' % (prevValue.hal_major_version, prevValue.hal_minor_version, pascal_case(entry.name))} {
+          % endif
+          % if val.id is None:
+    ${entry.name | csym}_${val.name},
+          % else:
+    ${'%s_%s'%(csym(entry.name), val.name) | pad(65)} = ${val.id},
+          % endif
+<%        isFirstValue = False %>\
+        % else:
+<%        prevValue = val %>\
+        % endif
+      % endfor
+};
+    % endif
+  % endfor
+% endfor
index d857589..10b8f87 100644 (file)
@@ -98,6 +98,7 @@ message CameraDeviceInfo {
     repeated int32 android_hotPixel_availableHotPixelModes = 458752;
     repeated Size android_jpeg_availableThumbnailSizes = 524288;
     optional int32 android_lens_facing = 589824;
+    optional int32 android_lens_poseReference = 589825;
     repeated float android_lens_info_availableApertures = 655360;
     repeated float android_lens_info_availableFilterDensities = 655361;
     repeated float android_lens_info_availableFocalLengths = 655362;
@@ -145,12 +146,16 @@ message CameraDeviceInfo {
     optional int32 android_statistics_info_maxFaceCount = 1245185;
     repeated bool android_statistics_info_availableHotPixelMapModes = 1245186;
     repeated int32 android_statistics_info_availableLensShadingMapModes = 1245187;
+    repeated int32 android_statistics_info_availableOisDataModes = 1245188;
     optional int32 android_tonemap_maxCurvePoints = 1310720;
     repeated int32 android_tonemap_availableToneMapModes = 1310721;
     optional int32 android_info_supportedHardwareLevel = 1441792;
+    optional string android_info_version = 1441793;
     optional int32 android_sync_maxLatency = 1572864;
     optional int32 android_reprocess_maxCaptureStall = 1638400;
     optional bool android_depth_depthIsExclusive = 1703936;
+    optional int32 android_logicalMultiCamera_sensorSyncType = 1769472;
+    repeated int32 android_distortionCorrection_availableModes = 1835008;
     // End of codegen fields
   }
 
index b950c27..7e951a2 100644 (file)
@@ -77,10 +77,10 @@ typedef enum camera_metadata_tag {
     % for sec in find_all_sections(metadata):
       % for idx,entry in enumerate(remove_synthetic(find_unique_entries(sec))):
         % if idx == 0:
-    ${entry.name + " = " | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility}
+    ${entry.name + " = " | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility | ljust(12)} | HIDL v${entry.hal_major_version}.${entry.hal_minor_version}
             ${path_name(find_parent_section(entry)) | csym}_START,
         % else:
-    ${entry.name + "," | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility}
+    ${entry.name + "," | csym,ljust(50)}// ${annotated_type(entry) | ljust(12)} | ${entry.applied_visibility | ljust(12)} | HIDL v${entry.hal_major_version}.${entry.hal_minor_version}
         % endif
       % endfor
     ${path_name(sec) | csym}_END,
@@ -99,9 +99,9 @@ typedef enum camera_metadata_tag {
 typedef enum camera_metadata_enum_${csym(entry.name).lower()} {
       % for val in entry.enum.values:
         % if val.id is None:
-    ${entry.name | csym}_${val.name},
+    ${entry.name | csym}_${val.name | pad(70)}, // HIDL v${val.hal_major_version}.${val.hal_minor_version}
         % else:
-    ${'%s_%s'%(csym(entry.name), val.name) | pad(65)} = ${val.id},
+    ${'%s_%s'%(csym(entry.name), val.name) | pad(70)} = ${val.id}, // HIDL v${val.hal_major_version}.${val.hal_minor_version}
         % endif
       % endfor
 } camera_metadata_enum_${csym(entry.name).lower()}_t;
index 1f14157..6d9a398 100644 (file)
     .th_units { width: 10% }
     .th_tags { width: 5% }
     .th_details { width: 25% }
-    .th_type { width: 20% }
+    .th_type { width: 17% }
     .th_description { width: 20% }
-    .th_range { width: 10% }
+    .th_range { width: 8% }
+    .th_hal_version { width: 5% }
     td { font-size: 0.9em; }
 
     /* hide the first thead, we need it there only to enforce column sizes */
             ><a href="#dynamic_android.control.postRawSensitivityBoost">android.control.postRawSensitivityBoost</a></li>
             <li
             ><a href="#dynamic_android.control.enableZsl">android.control.enableZsl</a></li>
+            <li
+            ><a href="#dynamic_android.control.afSceneChange">android.control.afSceneChange</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
             <li
             ><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
             <li
+                class="toc_deprecated"
             ><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
+            <li
+            ><a href="#static_android.lens.poseReference">android.lens.poseReference</a></li>
+            <li
+            ><a href="#static_android.lens.distortion">android.lens.distortion</a></li>
           </ul>
         </li>
         <li>
             <li
             ><a href="#dynamic_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a></li>
             <li
+                class="toc_deprecated"
             ><a href="#dynamic_android.lens.radialDistortion">android.lens.radialDistortion</a></li>
+            <li
+            ><a href="#dynamic_android.lens.distortion">android.lens.distortion</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
             ><a href="#static_android.request.availableResultKeys">android.request.availableResultKeys</a></li>
             <li
             ><a href="#static_android.request.availableCharacteristicsKeys">android.request.availableCharacteristicsKeys</a></li>
+            <li
+            ><a href="#static_android.request.availableSessionKeys">android.request.availableSessionKeys</a></li>
+            <li
+            ><a href="#static_android.request.availablePhysicalCameraRequestKeys">android.request.availablePhysicalCameraRequestKeys</a></li>
           </ul>
         </li>
         <li>
             ><a href="#controls_android.statistics.hotPixelMapMode">android.statistics.hotPixelMapMode</a></li>
             <li
             ><a href="#controls_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
+            <li
+            ><a href="#controls_android.statistics.oisDataMode">android.statistics.oisDataMode</a></li>
           </ul>
         </li>
         <li>
             ><a href="#static_android.statistics.info.availableHotPixelMapModes">android.statistics.info.availableHotPixelMapModes</a></li>
             <li
             ><a href="#static_android.statistics.info.availableLensShadingMapModes">android.statistics.info.availableLensShadingMapModes</a></li>
+            <li
+            ><a href="#static_android.statistics.info.availableOisDataModes">android.statistics.info.availableOisDataModes</a></li>
 
           </ul>
         </li>
             ><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a></li>
             <li
             ><a href="#dynamic_android.statistics.lensShadingMapMode">android.statistics.lensShadingMapMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.oisDataMode">android.statistics.oisDataMode</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.oisTimestamps">android.statistics.oisTimestamps</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.oisXShifts">android.statistics.oisXShifts</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.oisYShifts">android.statistics.oisYShifts</a></li>
+            <li
+            ><a href="#dynamic_android.statistics.oisSamples">android.statistics.oisSamples</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
           <ul class="toc_section">
             <li
             ><a href="#static_android.info.supportedHardwareLevel">android.info.supportedHardwareLevel</a></li>
+            <li
+            ><a href="#static_android.info.version">android.info.version</a></li>
           </ul>
         </li>
       </ul> <!-- toc_section -->
         </li>
       </ul> <!-- toc_section -->
     </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_logicalMultiCamera">logicalMultiCamera</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.logicalMultiCamera.physicalIds">android.logicalMultiCamera.physicalIds</a></li>
+            <li
+            ><a href="#static_android.logicalMultiCamera.sensorSyncType">android.logicalMultiCamera.sensorSyncType</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
+    <li>
+      <span class="toc_section_header"><a href="#section_distortionCorrection">distortionCorrection</a></span>
+      <ul class="toc_section">
+        <li>
+          <span class="toc_kind_header">controls</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#controls_android.distortionCorrection.mode">android.distortionCorrection.mode</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">static</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#static_android.distortionCorrection.availableModes">android.distortionCorrection.availableModes</a></li>
+          </ul>
+        </li>
+        <li>
+          <span class="toc_kind_header">dynamic</span>
+          <ul class="toc_section">
+            <li
+            ><a href="#dynamic_android.distortionCorrection.mode">android.distortionCorrection.mode</a></li>
+          </ul>
+        </li>
+      </ul> <!-- toc_section -->
+    </li>
   </ul>
 
 
         <th class="th_description">Description</th>
         <th class="th_units">Units</th>
         <th class="th_range">Range</th>
+        <th class="th_hal_version">HIDL HAL version</th>
         <th class="th_tags">Tags</th>
       </tr>
     </thead> <!-- so that the first occurrence of thead is not
                          above the first occurrence of tr -->
 <!-- <namespace name="android"> -->
-  <tr><td colspan="6" id="section_colorCorrection" class="section">colorCorrection</td></tr>
+  <tr><td colspan="7" id="section_colorCorrection" class="section">colorCorrection</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
+                    <span class="entry_type_enum_name">TRANSFORM_MATRIX (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
 and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
 <p>All advanced white balance adjustments (not specified
@@ -1212,7 +1285,7 @@ TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
 this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
 capture rate relative to sensor raw output.<wbr/></p>
 <p>Advanced white balance adjustments above and beyond
@@ -1222,7 +1295,7 @@ the camera device uses the last frame's AWB values
 (or defaults if AWB has never been run).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
 quality but the capture rate might be reduced (relative to sensor
 raw output rate)</p>
@@ -1247,15 +1320,19 @@ sensor's native color into linear sRGB color.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
 control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
 application controls how the color mapping is performed.<wbr/></p>
@@ -1298,10 +1375,10 @@ values are clipped to fit within the range.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -1309,7 +1386,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same ou
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1348,15 +1425,19 @@ from sensor RGB color space to output linear sRGB color space.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is either set by the camera device when the request
 <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
 directly by the application in the request when the
@@ -1372,7 +1453,7 @@ values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wb
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1411,15 +1492,19 @@ white-balance.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>These per-channel gains are either set by the camera device
 when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
 TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
@@ -1435,10 +1520,10 @@ this can create color artifacts.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The 4-channel white-balance gains are defined in
 the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
 for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
@@ -1449,7 +1534,7 @@ channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1470,16 +1555,16 @@ channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
 relative to sensor raw output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
 reduced (relative to sensor raw output rate)</p></span>
                   </li>
@@ -1498,15 +1583,19 @@ reduced (relative to sensor raw output rate)</p></span>
               <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
 can not focus on the same point after exiting from the lens.<wbr/> This metadata defines
 the high level control of chromatic aberration correction algorithm,<wbr/> which aims to
@@ -1522,14 +1611,14 @@ applying aberration correction.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -1538,6 +1627,7 @@ applying aberration correction.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -1570,7 +1660,7 @@ applying aberration correction.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">TRANSFORM_MATRIX</span>
+                    <span class="entry_type_enum_name">TRANSFORM_MATRIX (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a> matrix
 and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> to do color conversion.<wbr/></p>
 <p>All advanced white balance adjustments (not specified
@@ -1580,7 +1670,7 @@ TRANSFORM_<wbr/>MATRIX is ignored.<wbr/> The camera device will override
 this value to either FAST or HIGH_<wbr/>QUALITY.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Color correction processing must not slow down
 capture rate relative to sensor raw output.<wbr/></p>
 <p>Advanced white balance adjustments above and beyond
@@ -1590,7 +1680,7 @@ the camera device uses the last frame's AWB values
 (or defaults if AWB has never been run).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Color correction processing operates at improved
 quality but the capture rate might be reduced (relative to sensor
 raw output rate)</p>
@@ -1615,15 +1705,19 @@ sensor's native color into linear sRGB color.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When auto-white balance (AWB) is enabled with <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> this
 control is overridden by the AWB routine.<wbr/> When AWB is disabled,<wbr/> the
 application controls how the color mapping is performed.<wbr/></p>
@@ -1666,10 +1760,10 @@ values are clipped to fit within the range.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if color correction control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -1677,7 +1771,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY should generate the same ou
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1716,15 +1810,19 @@ from sensor RGB color space to output linear sRGB color space.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is either set by the camera device when the request
 <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not TRANSFORM_<wbr/>MATRIX,<wbr/> or
 directly by the application in the request when the
@@ -1740,7 +1838,7 @@ values within [-1.<wbr/>5,<wbr/> 3.<wbr/>0] are guaranteed not to be clipped.<wb
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1779,15 +1877,19 @@ white-balance.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>These per-channel gains are either set by the camera device
 when the request <a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a> is not
 TRANSFORM_<wbr/>MATRIX,<wbr/> or directly by the application in the
@@ -1803,10 +1905,10 @@ this can create color artifacts.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The 4-channel white-balance gains are defined in
 the order of <code>[R G_<wbr/>even G_<wbr/>odd B]</code>,<wbr/> where <code>G_<wbr/>even</code> is the gain
 for green pixels on even rows of the output,<wbr/> and <code>G_<wbr/>odd</code>
@@ -1817,7 +1919,7 @@ channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -1838,16 +1940,16 @@ channels,<wbr/> it must use the <code>G_<wbr/>even</code> value,<wbr/> and write
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No aberration correction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Aberration correction will not slow down capture rate
 relative to sensor raw output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Aberration correction operates at improved quality but the capture rate might be
 reduced (relative to sensor raw output rate)</p></span>
                   </li>
@@ -1866,15 +1968,19 @@ reduced (relative to sensor raw output rate)</p></span>
               <p><a href="#static_android.colorCorrection.availableAberrationModes">android.<wbr/>color<wbr/>Correction.<wbr/>available<wbr/>Aberration<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
 can not focus on the same point after exiting from the lens.<wbr/> This metadata defines
 the high level control of chromatic aberration correction algorithm,<wbr/> which aims to
@@ -1890,14 +1996,14 @@ applying aberration correction.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -1906,6 +2012,7 @@ applying aberration correction.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -1956,6 +2063,10 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -1964,10 +2075,10 @@ supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This key lists the valid modes for <a href="#controls_android.colorCorrection.aberrationMode">android.<wbr/>color<wbr/>Correction.<wbr/>aberration<wbr/>Mode</a>.<wbr/>  If no
 aberration correction modes are available for a device,<wbr/> this list will solely include
 OFF mode.<wbr/> All camera devices will support either OFF or FAST mode.<wbr/></p>
@@ -1978,10 +2089,10 @@ OFF mode.<wbr/> This includes all FULL level devices.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if chromatic aberration control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -1989,7 +2100,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -1998,10 +2109,10 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_control" class="section">control</td></tr>
+  <tr><td colspan="7" id="section_control" class="section">control</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -2010,6 +2121,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -2042,23 +2154,23 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
 avoid banding problems.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_name">50HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
 avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_name">60HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
 avoid banding problems with 60Hz illumination
 sources.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
 antibanding routine to the current illumination
 condition.<wbr/> This is the default mode if AUTO is
@@ -2080,6 +2192,10 @@ algorithm's antibanding compensation.<wbr/></p>
               <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2088,10 +2204,10 @@ algorithm's antibanding compensation.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
 lights,<wbr/> flicker at the rate of the power supply frequency
 (60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
@@ -2125,10 +2241,10 @@ the application in this.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For all capture request templates,<wbr/> this field must be set
 to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
 the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
@@ -2140,7 +2256,7 @@ adjusted for antibanding.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2175,6 +2291,10 @@ brightness.<wbr/></p>
               <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2183,10 +2303,10 @@ brightness.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The adjustment is measured as a count of steps,<wbr/> with the
 step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
 allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
@@ -2206,7 +2326,7 @@ FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2227,12 +2347,12 @@ FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
 is free to update its parameters.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
 must not update the exposure and sensitivity parameters
 while the lock is active.<wbr/></p>
@@ -2256,6 +2376,10 @@ calculated values.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2264,10 +2388,10 @@ calculated values.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
 and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
 <p>Note that even when AE is locked,<wbr/> the flash may be fired if
@@ -2306,7 +2430,7 @@ any flicker during the switch,<wbr/> the following procedure is recommended:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2327,7 +2451,7 @@ any flicker during the switch,<wbr/> the following procedure is recommended:</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
 <p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
 <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
@@ -2346,7 +2470,7 @@ for more details.<wbr/></p>
 override attempts to use this value to ON.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
 with no flash control.<wbr/></p>
 <p>The application's values for
@@ -2357,7 +2481,7 @@ application has control over the various
 android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
 the camera's flash unit,<wbr/> firing it in low-light
 conditions.<wbr/></p>
@@ -2368,7 +2492,7 @@ may be fired for captures for which the
 STILL_<wbr/>CAPTURE</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
+                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
 the camera's flash unit,<wbr/> always firing it for still
 captures.<wbr/></p>
@@ -2379,13 +2503,25 @@ will always be fired for captures for which the
 STILL_<wbr/>CAPTURE</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
 reduction.<wbr/></p>
 <p>If deemed necessary by the camera device,<wbr/> a red eye
 reduction flash will fire during the precapture
 sequence.<wbr/></p></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_EXTERNAL_FLASH (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>An external flash has been turned on.<wbr/></p>
+<p>It informs the camera device that an external flash has been turned on,<wbr/> and that
+metering (and continuous focus if active) should be quickly recalculated to account
+for the external flash.<wbr/> Otherwise,<wbr/> this mode acts like ON.<wbr/></p>
+<p>When the external flash is turned off,<wbr/> AE mode should be changed to one of the
+other available AE modes.<wbr/></p>
+<p>If the camera device supports AE external flash mode,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> must
+be FLASH_<wbr/>REQUIRED after the camera device finishes AE scan and it's too dark without
+flash.<wbr/></p></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -2402,6 +2538,10 @@ auto-exposure routine.<wbr/></p>
               <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2410,10 +2550,10 @@ auto-exposure routine.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
 AUTO.<wbr/></p>
 <p>When set to any of the ON modes,<wbr/> the camera device's
@@ -2436,7 +2576,7 @@ CaptureResult.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2474,6 +2614,10 @@ CaptureResult.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2482,10 +2626,10 @@ CaptureResult.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of regions supported by the device is determined by the value
@@ -2514,10 +2658,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -2527,7 +2671,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2567,6 +2711,10 @@ exposure.<wbr/></p>
               <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2575,10 +2723,10 @@ exposure.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
 manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
 <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
@@ -2586,7 +2734,7 @@ manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2607,18 +2755,18 @@ manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_name">IDLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_name">START (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
 by the camera device.<wbr/></p>
 <p>The exact effect of the precapture trigger depends on
 the current AE mode and state.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_name">CANCEL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
 precapture metering sequence,<wbr/> the auto-exposure routine will return to its
 initial state.<wbr/></p></span>
@@ -2638,6 +2786,10 @@ metering sequence when it processes this request.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2646,10 +2798,10 @@ metering sequence when it processes this request.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is normally set to IDLE,<wbr/> or is not
 included at all in the request settings.<wbr/> When included and
 set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
@@ -2700,10 +2852,10 @@ the camera device will complete them in the optimal order for that device.<wbr/>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
 (and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/>  It is acceptable for the HAL to
 treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
@@ -2712,7 +2864,7 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2733,13 +2885,13 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
 <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
 application.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
 <p>In this mode,<wbr/> the lens does not move unless
 the autofocus trigger action is called.<wbr/> When that trigger
@@ -2752,7 +2904,7 @@ is fixed-focus.<wbr/></p>
 and sets the AF state to INACTIVE.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MACRO</span>
+                    <span class="entry_type_enum_name">MACRO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
 <p>In this mode,<wbr/> the lens does not move unless the
 autofocus trigger action is called.<wbr/> When that trigger is
@@ -2767,7 +2919,7 @@ position to default,<wbr/> and sets the AF state to
 INACTIVE.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
+                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
 position continually to attempt to provide a
 constantly-in-focus image stream.<wbr/></p>
@@ -2787,7 +2939,7 @@ ongoing PASSIVE_<wbr/>SCAN must immediately be
 canceled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
+                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
 position continually to attempt to provide a
 constantly-in-focus image stream.<wbr/></p>
@@ -2806,7 +2958,7 @@ should transition back to INACTIVE and then act as if it
 has just been started.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">EDOF</span>
+                    <span class="entry_type_enum_name">EDOF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
 <p>The camera device will produce images with an extended
 depth of field automatically; no special focusing
@@ -2830,6 +2982,10 @@ mode it is set to.<wbr/></p>
               <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2838,10 +2994,10 @@ mode it is set to.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
 (i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> &gt; 0</code>).<wbr/> Also note that
 when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
@@ -2854,10 +3010,10 @@ in result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
 request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
 up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
@@ -2881,7 +3037,7 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2919,6 +3075,10 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -2927,10 +3087,10 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of focus areas supported by the device is determined by the value
@@ -2949,7 +3109,8 @@ camera device will add the weights in the overlap region.<wbr/></p>
 is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
 ignored.<wbr/></p>
 <p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
+camera device.<wbr/> The capture result will either be a zero weight region as well,<wbr/> or
+the region selected by the camera device as the focus area of interest.<wbr/></p>
 <p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
 capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
 region and output only the intersection rectangle as the metering region in the result
@@ -2959,10 +3120,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -2972,7 +3133,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -2993,15 +3154,15 @@ exclusive on xmax and ymax.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_name">IDLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_name">START (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_name">CANCEL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
 state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
                   </li>
@@ -3019,6 +3180,10 @@ state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3027,10 +3192,10 @@ state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is normally set to IDLE,<wbr/> or is not
 included at all in the request settings.<wbr/></p>
 <p>When included and set to START,<wbr/> the camera device will trigger the
@@ -3052,10 +3217,10 @@ changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
 (and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/>  It is acceptable for the HAL to
 treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
@@ -3064,7 +3229,7 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3085,13 +3250,13 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
 algorithm is free to update its parameters if in AUTO
 mode.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
 algorithm will not update its parameters while the lock
 is active.<wbr/></p></span>
@@ -3111,6 +3276,10 @@ latest calculated values.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3119,10 +3288,10 @@ latest calculated values.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
 and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
 <p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
@@ -3146,7 +3315,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3167,7 +3336,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
 <p>The application-selected color transform matrix
 (<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
@@ -3175,7 +3344,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
 device for manual white balance control.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
 <p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
 and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
@@ -3184,7 +3353,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">INCANDESCENT</span>
+                    <span class="entry_type_enum_name">INCANDESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses incandescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3198,7 +3367,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_name">FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses fluorescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3212,7 +3381,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">WARM_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses warm fluorescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3226,7 +3395,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_name">DAYLIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses daylight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3240,7 +3409,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
+                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses cloudy daylight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3251,7 +3420,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TWILIGHT</span>
+                    <span class="entry_type_enum_name">TWILIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses twilight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3262,7 +3431,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_name">SHADE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses shade light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -3289,6 +3458,10 @@ is.<wbr/></p>
               <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3297,10 +3470,10 @@ is.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
 <p>When set to the ON mode,<wbr/> the camera device's auto-white balance
 routine is enabled,<wbr/> overriding the application's selected
@@ -3324,7 +3497,7 @@ adjustment.<wbr/> The application's values for
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3363,6 +3536,10 @@ estimation.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3371,10 +3548,10 @@ estimation.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of regions supported by the device is determined by the value
@@ -3403,10 +3580,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -3416,7 +3593,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3437,30 +3614,30 @@ exclusive on xmax and ymax.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CUSTOM</span>
+                    <span class="entry_type_enum_name">CUSTOM (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
 categories.<wbr/> The camera device will default to preview-like
 behavior.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PREVIEW</span>
+                    <span class="entry_type_enum_name">PREVIEW (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
 <p>The precapture trigger may be used to start off a metering
 w/<wbr/>flash sequence.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STILL_CAPTURE</span>
+                    <span class="entry_type_enum_name">STILL_CAPTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a still capture-type
 use case.<wbr/></p>
 <p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">VIDEO_RECORD</span>
+                    <span class="entry_type_enum_name">VIDEO_RECORD (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a video recording
 use case.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
+                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
 image while recording video) use case.<wbr/></p>
 <p>The camera device should take the highest-quality image
@@ -3468,19 +3645,27 @@ possible (given the other settings) without disrupting the
 frame rate of video recording.<wbr/>  </p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a ZSL usecase; the
 application will stream full-resolution images and
 reprocess one or several later for a final
 capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MANUAL</span>
+                    <span class="entry_type_enum_name">MANUAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for manual capture use case where
 the applications want to directly control the capture parameters.<wbr/></p>
 <p>For example,<wbr/> the application may wish to manually control
 <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">MOTION_TRACKING (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a motion tracking use case,<wbr/> where
+the application will use camera and inertial sensor data to
+locate and track objects in the world.<wbr/></p>
+<p>The camera device auto-exposure routine will limit the exposure time
+of the camera to no more than 20 milliseconds,<wbr/> to minimize motion blur.<wbr/></p></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -3498,6 +3683,10 @@ strategy.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3506,21 +3695,24 @@ strategy.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control (except for MANUAL) is only effective if
 <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
-contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
-<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
-always supported.<wbr/></p>
+<p>All intents are supported by all devices,<wbr/> except that:
+  * ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/>
+  * MANUAL will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+MANUAL_<wbr/>SENSOR.<wbr/>
+  * MOTION_<wbr/>TRACKING will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+MOTION_<wbr/>TRACKING.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3541,56 +3733,56 @@ always supported.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MONO</span>
+                    <span class="entry_type_enum_name">MONO (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
 a single color.<wbr/></p>
 <p>This will typically be grayscale.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NEGATIVE</span>
+                    <span class="entry_type_enum_name">NEGATIVE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
 are inverted.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SOLARIZE</span>
+                    <span class="entry_type_enum_name">SOLARIZE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
 image is wholly or partially reversed in
 tone.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SEPIA</span>
+                    <span class="entry_type_enum_name">SEPIA (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
 gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">POSTERIZE</span>
+                    <span class="entry_type_enum_name">POSTERIZE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
 discrete regions of tone rather than a continuous
 gradient of tones.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">WHITEBOARD</span>
+                    <span class="entry_type_enum_name">WHITEBOARD (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
 as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BLACKBOARD</span>
+                    <span class="entry_type_enum_name">BLACKBOARD (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
 as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AQUA</span>
+                    <span class="entry_type_enum_name">AQUA (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
                   </li>
@@ -3609,6 +3801,10 @@ as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
               <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3617,10 +3813,10 @@ as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When this mode is set,<wbr/> a color effect will be applied
 to images produced by the camera device.<wbr/> The interpretation
 and implementation of these color effects is left to the
@@ -3631,7 +3827,7 @@ devices.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3652,7 +3848,7 @@ devices.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
 <p>All control by the device's metering and focusing (3A)
 routines is disabled,<wbr/> and no other settings in
@@ -3667,14 +3863,14 @@ when control is switched to AUTO mode,<wbr/> good control values
 can be immediately applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
 <p>Manual control of capture parameters is disabled.<wbr/> All
 controls in android.<wbr/>control.<wbr/>* besides sceneMode take
 effect.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">USE_SCENE_MODE</span>
+                    <span class="entry_type_enum_name">USE_SCENE_MODE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
 <p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
@@ -3686,7 +3882,7 @@ This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
 contain some modes other than DISABLED).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
+                    <span class="entry_type_enum_name">OFF_KEEP_STATE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
 used by camera device background auto-exposure,<wbr/> auto-white balance and
@@ -3713,6 +3909,10 @@ routines.<wbr/></p>
               <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -3721,20 +3921,20 @@ routines.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
 by the camera device is disabled.<wbr/> The application must set the fields for
 capture parameters itself.<wbr/></p>
 <p>When set to AUTO,<wbr/> the individual algorithm controls in
 android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
 <p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
-android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
-one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
-as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
+android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device
+implements one of the scene mode settings (such as ACTION,<wbr/>
+SUNSET,<wbr/> or PARTY) as it wishes.<wbr/> The camera device scene mode
+3A settings are provided by <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
 <p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> it is similar to OFF mode,<wbr/> the only difference
 is that this frame will not be used by camera device background 3A statistics
 update,<wbr/> as if this frame is never captured.<wbr/> This mode can be used in the scenario
@@ -3744,7 +3944,7 @@ the subsequent auto 3A capture results.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -3765,12 +3965,12 @@ the subsequent auto 3A capture results.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">DISABLED</span>
+                    <span class="entry_type_enum_name">DISABLED (v3.2)</span>
                     <span class="entry_type_enum_value">0</span>
                     <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FACE_PRIORITY</span>
+                    <span class="entry_type_enum_name">FACE_PRIORITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
 detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
 auto-exposure routines.<wbr/></p>
@@ -3783,91 +3983,91 @@ face detection statistics to the framework).<wbr/></p>
 remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ACTION</span>
+                    <span class="entry_type_enum_name">ACTION (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
 <p>Similar to SPORTS.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PORTRAIT</span>
+                    <span class="entry_type_enum_name">PORTRAIT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">LANDSCAPE</span>
+                    <span class="entry_type_enum_name">LANDSCAPE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NIGHT</span>
+                    <span class="entry_type_enum_name">NIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
+                    <span class="entry_type_enum_name">NIGHT_PORTRAIT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
 settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">THEATRE</span>
+                    <span class="entry_type_enum_name">THEATRE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
 remain off.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BEACH</span>
+                    <span class="entry_type_enum_name">BEACH (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SNOW</span>
+                    <span class="entry_type_enum_name">SNOW (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SUNSET</span>
+                    <span class="entry_type_enum_name">SUNSET (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STEADYPHOTO</span>
+                    <span class="entry_type_enum_name">STEADYPHOTO (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
 device motion (for example: due to hand shake).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FIREWORKS</span>
+                    <span class="entry_type_enum_name">FIREWORKS (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SPORTS</span>
+                    <span class="entry_type_enum_name">SPORTS (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
 <p>Similar to ACTION.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PARTY</span>
+                    <span class="entry_type_enum_name">PARTY (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
 people.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANDLELIGHT</span>
+                    <span class="entry_type_enum_name">CANDLELIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
 is a flame.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BARCODE</span>
+                    <span class="entry_type_enum_name">BARCODE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of barcode
 for use by camera applications that wish to read the
 barcode value.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO (v3.2)</span>
                     <span class="entry_type_enum_deprecated">[deprecated]</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
@@ -3932,7 +4132,7 @@ reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommend
 the application avoids unnecessary scene mode switch as much as possible.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HDR</span>
+                    <span class="entry_type_enum_name">HDR (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
 <p>In this scene mode,<wbr/> the camera device captures images
@@ -3977,7 +4177,7 @@ or capture intents,<wbr/> the images captured will be as if
 the SCENE_<wbr/>MODE was not enabled at all.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
+                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
@@ -4003,7 +4203,7 @@ reducing the noise level of the captured images.<wbr/></p>
 remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DEVICE_CUSTOM_START</span>
+                    <span class="entry_type_enum_name">DEVICE_CUSTOM_START (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_value">100</span>
@@ -4012,7 +4212,7 @@ remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></
 customized scene modes.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DEVICE_CUSTOM_END</span>
+                    <span class="entry_type_enum_name">DEVICE_CUSTOM_END (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_value">127</span>
@@ -4035,6 +4235,10 @@ customized scene modes.<wbr/></p></span>
               <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4043,10 +4247,10 @@ customized scene modes.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Scene modes are custom camera modes optimized for a certain set of conditions and
 capture settings.<wbr/></p>
 <p>This is the mode that that is active when
@@ -4061,27 +4265,27 @@ a subset of these modes.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL implementations that include scene modes are expected to provide
 the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
 <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
 <a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
-<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
-the HAL must list supported video size and fps range in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
-1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
-mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
-requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
-This mode is deprecated in HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
+<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/> the
+HAL must list supported video size and fps range in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>  1280x720,<wbr/>
+if the HAL has two different sensor configurations for normal streaming mode and high
+speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture requests,<wbr/> the
+HAL may have to switch between different sensor modes.<wbr/>  This mode is deprecated in legacy
+HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
 <a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
 capbility defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4102,11 +4306,11 @@ capbility defined in <a href="#static_android.request.availableCapabilities">and
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
                   </li>
                 </ul>
@@ -4124,6 +4328,10 @@ active.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4132,10 +4340,10 @@ active.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Video stabilization automatically warps images from
 the camera in order to stabilize motion between consecutive frames.<wbr/></p>
 <p>If enabled,<wbr/> video stabilization can modify the
@@ -4163,7 +4371,7 @@ both at the same time.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4197,15 +4405,19 @@ after RAW sensor data is captured.<wbr/></p>
               <p><a href="#static_android.control.postRawSensitivityBoostRange">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Some camera devices support additional digital sensitivity boosting in the
 camera processing pipeline after sensor RAW image is captured.<wbr/>
 Such a boost will be applied to YUV/<wbr/>JPEG format output images but will not
@@ -4227,7 +4439,7 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4247,12 +4459,12 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Requests with <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE must be captured
 after previous requests.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Requests with <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE may or may not be
 captured before previous requests.<wbr/></p></span>
                   </li>
@@ -4271,15 +4483,19 @@ captured before previous requests.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If enableZsl is <code>true</code>,<wbr/> the camera device may enable zero-shutter-lag mode for requests with
 STILL_<wbr/>CAPTURE capture intent.<wbr/> The camera device may use images captured in the past to
 produce output images for a zero-shutter-lag request.<wbr/> The result metadata including the
@@ -4305,23 +4521,23 @@ capture templates is always <code>false</code> if present.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>It is valid for HAL to produce regular output images for requests with STILL_<wbr/>CAPTURE
 capture intent.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -4330,6 +4546,7 @@ capture intent.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -4380,6 +4597,10 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.aeAntibandingMode">android.<wbr/>control.<wbr/>ae<wbr/>Antibanding<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4388,10 +4609,10 @@ supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not all of the auto-exposure anti-banding modes may be
 supported by a given camera device.<wbr/> This field lists the
 valid anti-banding modes that the application may request
@@ -4401,7 +4622,7 @@ for this camera device with the
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4440,6 +4661,10 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4448,10 +4673,10 @@ device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not all the auto-exposure modes may be supported by a
 given camera device,<wbr/> especially if no flash unit is
 available.<wbr/> This entry lists the valid modes for
@@ -4468,7 +4693,7 @@ capability.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4507,6 +4732,10 @@ this camera device.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4515,10 +4744,10 @@ this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For devices at the LEGACY level or above:</p>
 <ul>
 <li>
@@ -4550,7 +4779,7 @@ maximum YUV_<wbr/>420_<wbr/>888 output size.<wbr/></li>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4594,6 +4823,10 @@ compensation is supported (<code>range != [0,<wbr/> 0]</code>):</p>
 <p>LEGACY devices may support a smaller range than this.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4603,7 +4836,7 @@ compensation is supported (<code>range != [0,<wbr/> 0]</code>):</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4637,6 +4870,10 @@ can be changed.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4645,10 +4882,10 @@ can be changed.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is the unit for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a>.<wbr/> For example,<wbr/> if this key has
 a value of <code>1/<wbr/>2</code>,<wbr/> then a setting of <code>-2</code> for <a href="#controls_android.control.aeExposureCompensation">android.<wbr/>control.<wbr/>ae<wbr/>Exposure<wbr/>Compensation</a> means
 that the target EV offset for the auto-exposure routine is -1 EV.<wbr/></p>
@@ -4658,15 +4895,15 @@ of two.<wbr/> +1 EV doubles the image brightness,<wbr/> while -1 EV halves the i
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This must be less than or equal to 1/<wbr/>2.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4705,6 +4942,10 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4713,10 +4954,10 @@ supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not all the auto-focus modes may be supported by a
 given camera device.<wbr/> This entry lists the valid modes for
 <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> for this camera device.<wbr/></p>
@@ -4730,7 +4971,7 @@ focusing to infinity (by also setting <a href="#controls_android.lens.focusDista
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4769,6 +5010,10 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.effectMode">android.<wbr/>control.<wbr/>effect<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4777,10 +5022,10 @@ device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This list contains the color effect modes that can be applied to
 images produced by the camera device.<wbr/>
 Implementations are not expected to be consistent across all devices.<wbr/>
@@ -4794,7 +5039,7 @@ as auto-exposure,<wbr/> white balance,<wbr/> or focus.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4833,6 +5078,10 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4841,10 +5090,10 @@ device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This list contains scene modes that can be set for the camera device.<wbr/>
 Only scene modes that have been fully implemented for the
 camera device may be included here.<wbr/> Implementations are not expected
@@ -4858,7 +5107,7 @@ supported (i.<wbr/>e.<wbr/><code><a href="#static_android.statistics.info.maxFac
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4897,6 +5146,10 @@ that are supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4905,16 +5158,16 @@ that are supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>OFF will always be listed.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -4953,6 +5206,10 @@ camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -4961,10 +5218,10 @@ camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not all the auto-white-balance modes may be supported by a
 given camera device.<wbr/> This entry lists the valid modes for
 <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> for this camera device.<wbr/></p>
@@ -4977,7 +5234,7 @@ mode camera devices.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5020,6 +5277,10 @@ this value must be &gt;= 1 for AE and AF.<wbr/> The order of the elements is:
 <code>(AE,<wbr/> AWB,<wbr/> AF)</code>.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5029,7 +5290,7 @@ this value must be &gt;= 1 for AE and AF.<wbr/> The order of the elements is:
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5065,31 +5326,35 @@ routine.<wbr/></p>
 value will be &gt;= 1.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This corresponds to the the maximum allowed number of elements in
 <a href="#controls_android.control.aeRegions">android.<wbr/>control.<wbr/>ae<wbr/>Regions</a>.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is private to the framework.<wbr/> Fill in
 maxRegions to have this entry be automatically populated.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5124,31 +5389,35 @@ routine.<wbr/></p>
               <p>Value will be &gt;= 0.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This corresponds to the the maximum allowed number of elements in
 <a href="#controls_android.control.awbRegions">android.<wbr/>control.<wbr/>awb<wbr/>Regions</a>.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is private to the framework.<wbr/> Fill in
 maxRegions to have this entry be automatically populated.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5183,31 +5452,35 @@ maxRegions to have this entry be automatically populated.<wbr/></p>
 value will be &gt;= 1.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This corresponds to the the maximum allowed number of elements in
 <a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is private to the framework.<wbr/> Fill in
 maxRegions to have this entry be automatically populated.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5249,6 +5522,10 @@ by the camera device.<wbr/> The entry order is <code>(aeMode,<wbr/> awbMode,<wbr
 where aeMode has the lowest index position.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5257,10 +5534,10 @@ where aeMode has the lowest index position.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When a scene mode is enabled,<wbr/> the camera device is expected
 to override <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/> <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/>
 and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> with its preferred settings for
@@ -5282,10 +5559,10 @@ ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> INCANDESCENT,<wbr/> AUTO)</code>.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>To maintain backward compatibility,<wbr/> this list will be made available
 in the static metadata of the camera service.<wbr/>  The camera service will
 use these values to set <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
@@ -5294,7 +5571,7 @@ mode other than FACE_<wbr/>PRIORITY and FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGH
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5332,6 +5609,10 @@ supported by the camera device,<wbr/> in the format of (width,<wbr/> height,<wbr
               <p>For each configuration,<wbr/> the fps_<wbr/>max &gt;= 120fps.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -5340,10 +5621,10 @@ supported by the camera device,<wbr/> in the format of (width,<wbr/> height,<wbr
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO is supported in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>,<wbr/>
 this metadata will list the supported high speed video size,<wbr/> fps range and max batch size
 configurations.<wbr/> All the sizes listed in this configuration will be a subset of the sizes
@@ -5387,10 +5668,10 @@ normal capture session,<wbr/> or it will cause request error.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All the sizes listed in this configuration will be a subset of the sizes reported by
 <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> for processed non-stalling output formats.<wbr/>
 Note that for all high speed video configurations,<wbr/> HAL must be able to support a minimum
@@ -5411,7 +5692,7 @@ switch the sensor mode as quick as possible to minimize the mode switch caused s
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5432,10 +5713,10 @@ switch the sensor mode as quick as possible to minimize the mode switch caused s
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                   </li>
                 </ul>
 
@@ -5451,6 +5732,10 @@ switch the sensor mode as quick as possible to minimize the mode switch caused s
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5459,17 +5744,17 @@ switch the sensor mode as quick as possible to minimize the mode switch caused s
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Devices with MANUAL_<wbr/>SENSOR capability or BURST_<wbr/>CAPTURE capability will always
 list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5490,10 +5775,10 @@ list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                   </li>
                 </ul>
 
@@ -5509,6 +5794,10 @@ list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5517,17 +5806,17 @@ list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Devices with MANUAL_<wbr/>POST_<wbr/>PROCESSING capability or BURST_<wbr/>CAPTURE capability will
 always list <code>true</code>.<wbr/> This includes FULL devices.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5566,15 +5855,19 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This list contains control modes that can be set for the camera device.<wbr/>
 LEGACY mode devices will always support AUTO mode.<wbr/> LIMITED and FULL
 devices will always support OFF,<wbr/> AUTO modes.<wbr/></p>
@@ -5582,7 +5875,7 @@ devices will always support OFF,<wbr/> AUTO modes.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5620,15 +5913,19 @@ by this camera device.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Devices support post RAW sensitivity boost  will advertise
 <a href="#controls_android.control.postRawSensitivityBoost">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost</a> key for controling
 post RAW sensitivity boost.<wbr/></p>
@@ -5640,25 +5937,25 @@ list <code>(100,<wbr/> 100)</code> in this key.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>This key is added in HAL3.<wbr/>4.<wbr/> For HAL3.<wbr/>3 or earlier devices,<wbr/> camera framework will
-generate this key as <code>(100,<wbr/> 100)</code> if device supports any of RAW output formats.<wbr/>
-All HAL3.<wbr/>4 and above devices should list this key if device supports any of RAW
-output formats.<wbr/></p>
+            <td class="entry_details" colspan="6">
+              <p>This key is added in legacy HAL3.<wbr/>4.<wbr/> For legacy HAL3.<wbr/>3 or earlier devices,<wbr/> camera
+framework will generate this key as <code>(100,<wbr/> 100)</code> if device supports any of RAW output
+formats.<wbr/>  All legacy HAL3.<wbr/>4 and above devices should list this key if device supports
+any of RAW output formats.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -5667,6 +5964,7 @@ output formats.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -5713,15 +6011,19 @@ CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING call</p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Must be 0 if no
 CAMERA2_<wbr/>TRIGGER_<wbr/>PRECAPTURE_<wbr/>METERING trigger received yet
 by HAL.<wbr/> Always updated even if AE algorithm ignores the
@@ -5730,7 +6032,7 @@ trigger</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5751,23 +6053,23 @@ trigger</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will not adjust exposure duration to
 avoid banding problems.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_name">50HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
 avoid banding problems with 50Hz illumination sources.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_name">60HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will adjust exposure duration to
 avoid banding problems with 60Hz illumination
 sources.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will automatically adapt its
 antibanding routine to the current illumination
 condition.<wbr/> This is the default mode if AUTO is
@@ -5789,6 +6091,10 @@ algorithm's antibanding compensation.<wbr/></p>
               <p><a href="#static_android.control.aeAvailableAntibandingModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Antibanding<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5797,10 +6103,10 @@ algorithm's antibanding compensation.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Some kinds of lighting fixtures,<wbr/> such as some fluorescent
 lights,<wbr/> flicker at the rate of the power supply frequency
 (60Hz or 50Hz,<wbr/> depending on country).<wbr/> While this is
@@ -5834,10 +6140,10 @@ the application in this.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For all capture request templates,<wbr/> this field must be set
 to AUTO if AUTO mode is available.<wbr/> If AUTO is not available,<wbr/>
 the default must be either 50HZ or 60HZ,<wbr/> and both 50HZ and
@@ -5849,7 +6155,7 @@ adjusted for antibanding.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5884,6 +6190,10 @@ brightness.<wbr/></p>
               <p><a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5892,10 +6202,10 @@ brightness.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The adjustment is measured as a count of steps,<wbr/> with the
 step size defined by <a href="#static_android.control.aeCompensationStep">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Step</a> and the
 allowed range by <a href="#static_android.control.aeCompensationRange">android.<wbr/>control.<wbr/>ae<wbr/>Compensation<wbr/>Range</a>.<wbr/></p>
@@ -5915,7 +6225,7 @@ FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -5936,12 +6246,12 @@ FLASH_<wbr/>REQUIRED (if the scene is too dark for still capture).<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-exposure lock is disabled; the AE algorithm
 is free to update its parameters.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-exposure lock is enabled; the AE algorithm
 must not update the exposure and sensitivity parameters
 while the lock is active.<wbr/></p>
@@ -5965,6 +6275,10 @@ calculated values.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -5973,10 +6287,10 @@ calculated values.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to <code>true</code> (ON),<wbr/> the AE algorithm is locked to its latest parameters,<wbr/>
 and will not change exposure settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
 <p>Note that even when AE is locked,<wbr/> the flash may be fired if
@@ -6015,7 +6329,7 @@ any flicker during the switch,<wbr/> the following procedure is recommended:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6036,7 +6350,7 @@ any flicker during the switch,<wbr/> the following procedure is recommended:</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is disabled.<wbr/></p>
 <p>The application-selected <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/>
 <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> and
@@ -6055,7 +6369,7 @@ for more details.<wbr/></p>
 override attempts to use this value to ON.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's autoexposure routine is active,<wbr/>
 with no flash control.<wbr/></p>
 <p>The application's values for
@@ -6066,7 +6380,7 @@ application has control over the various
 android.<wbr/>flash.<wbr/>* fields.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_AUTO_FLASH</span>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
 the camera's flash unit,<wbr/> firing it in low-light
 conditions.<wbr/></p>
@@ -6077,7 +6391,7 @@ may be fired for captures for which the
 STILL_<wbr/>CAPTURE</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH</span>
+                    <span class="entry_type_enum_name">ON_ALWAYS_FLASH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON,<wbr/> except that the camera device also controls
 the camera's flash unit,<wbr/> always firing it for still
 captures.<wbr/></p>
@@ -6088,13 +6402,25 @@ will always be fired for captures for which the
 STILL_<wbr/>CAPTURE</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE</span>
+                    <span class="entry_type_enum_name">ON_AUTO_FLASH_REDEYE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Like ON_<wbr/>AUTO_<wbr/>FLASH,<wbr/> but with automatic red eye
 reduction.<wbr/></p>
 <p>If deemed necessary by the camera device,<wbr/> a red eye
 reduction flash will fire during the precapture
 sequence.<wbr/></p></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON_EXTERNAL_FLASH (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>An external flash has been turned on.<wbr/></p>
+<p>It informs the camera device that an external flash has been turned on,<wbr/> and that
+metering (and continuous focus if active) should be quickly recaculated to account
+for the external flash.<wbr/> Otherwise,<wbr/> this mode acts like ON.<wbr/></p>
+<p>When the external flash is turned off,<wbr/> AE mode should be changed to one of the
+other available AE modes.<wbr/></p>
+<p>If the camera device supports AE external flash mode,<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> must
+be FLASH_<wbr/>REQUIRED after the camera device finishes AE scan and it's too dark without
+flash.<wbr/></p></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -6111,6 +6437,10 @@ auto-exposure routine.<wbr/></p>
               <p><a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6119,10 +6449,10 @@ auto-exposure routine.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is
 AUTO.<wbr/></p>
 <p>When set to any of the ON modes,<wbr/> the camera device's
@@ -6145,7 +6475,7 @@ CaptureResult.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6183,6 +6513,10 @@ CaptureResult.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6191,10 +6525,10 @@ CaptureResult.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAe">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Ae</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of regions supported by the device is determined by the value
@@ -6223,10 +6557,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -6236,7 +6570,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6276,6 +6610,10 @@ exposure.<wbr/></p>
               <p>Any of the entries in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6284,10 +6622,10 @@ exposure.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only constrains auto-exposure (AE) algorithm,<wbr/> not
 manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a> and
 <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a>.<wbr/></p>
@@ -6295,7 +6633,7 @@ manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6316,18 +6654,18 @@ manual control of <a href="#controls_android.sensor.exposureTime">android.<wbr/>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_name">IDLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_name">START (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The precapture metering sequence will be started
 by the camera device.<wbr/></p>
 <p>The exact effect of the precapture trigger depends on
 the current AE mode and state.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_name">CANCEL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device will cancel any currently active or completed
 precapture metering sequence,<wbr/> the auto-exposure routine will return to its
 initial state.<wbr/></p></span>
@@ -6347,6 +6685,10 @@ metering sequence when it processes this request.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6355,10 +6697,10 @@ metering sequence when it processes this request.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is normally set to IDLE,<wbr/> or is not
 included at all in the request settings.<wbr/> When included and
 set to START,<wbr/> the camera device will trigger the auto-exposure (AE)
@@ -6409,10 +6751,10 @@ the camera device will complete them in the optimal order for that device.<wbr/>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must support triggering the AE precapture trigger while an AF trigger is active
 (and vice versa),<wbr/> or at the same time as the AF trigger.<wbr/>  It is acceptable for the HAL to
 treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
@@ -6421,7 +6763,7 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6442,36 +6784,36 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_name">INACTIVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE is off or recently reset.<wbr/></p>
 <p>When a camera device is opened,<wbr/> it starts in
 this state.<wbr/> This is a transient state,<wbr/> the camera device may skip reporting
 this state in capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SEARCHING</span>
+                    <span class="entry_type_enum_name">SEARCHING (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE doesn't yet have a good set of control values
 for the current scene.<wbr/></p>
 <p>This is a transient state,<wbr/> the camera device may skip
 reporting this state in capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONVERGED</span>
+                    <span class="entry_type_enum_name">CONVERGED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE has a good set of control values for the
 current scene.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">LOCKED</span>
+                    <span class="entry_type_enum_name">LOCKED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE has been locked.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FLASH_REQUIRED</span>
+                    <span class="entry_type_enum_name">FLASH_REQUIRED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE has a good set of control values,<wbr/> but flash
 needs to be fired for good quality still
 capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PRECAPTURE</span>
+                    <span class="entry_type_enum_name">PRECAPTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AE has been asked to do a precapture sequence
 and is currently executing it.<wbr/></p>
 <p>Precapture can be triggered through setting
@@ -6498,15 +6840,19 @@ capture result.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Switching between or enabling AE modes (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>) always
 resets the AE state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
 or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
@@ -6536,7 +6882,7 @@ be good to use.<wbr/></p>
 </tr>
 </tbody>
 </table>
-<p>When <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is AE_<wbr/>MODE_<wbr/>ON_<wbr/>*:</p>
+<p>When <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is AE_<wbr/>MODE_<wbr/>ON*:</p>
 <table>
 <thead>
 <tr>
@@ -6657,10 +7003,13 @@ be good to use.<wbr/></p>
 </tr>
 </tbody>
 </table>
+<p>If the camera device supports AE external flash mode (ON_<wbr/>EXTERNAL_<wbr/>FLASH is included in
+<a href="#static_android.control.aeAvailableModes">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Modes</a>),<wbr/> <a href="#dynamic_android.control.aeState">android.<wbr/>control.<wbr/>ae<wbr/>State</a> must be FLASH_<wbr/>REQUIRED after
+the camera device finishes AE scan and it's too dark without flash.<wbr/></p>
 <p>For the above table,<wbr/> the camera device may skip reporting any state changes that happen
 without application intervention (i.<wbr/>e.<wbr/> mode switch,<wbr/> trigger,<wbr/> locking).<wbr/> Any state that
 can be skipped in that manner is called a transient state.<wbr/></p>
-<p>For example,<wbr/> for above AE modes (AE_<wbr/>MODE_<wbr/>ON_<wbr/>*),<wbr/> in addition to the state transitions
+<p>For example,<wbr/> for above AE modes (AE_<wbr/>MODE_<wbr/>ON*),<wbr/> in addition to the state transitions
 listed in above table,<wbr/> it is also legal for the camera device to skip one or more
 transient states between two results.<wbr/> See below table for examples:</p>
 <table>
@@ -6721,7 +7070,7 @@ transient states between two results.<wbr/> See below table for examples:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6742,13 +7091,13 @@ transient states between two results.<wbr/> See below table for examples:</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The auto-focus routine does not control the lens;
 <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> is controlled by the
 application.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Basic automatic focus mode.<wbr/></p>
 <p>In this mode,<wbr/> the lens does not move unless
 the autofocus trigger action is called.<wbr/> When that trigger
@@ -6761,7 +7110,7 @@ is fixed-focus.<wbr/></p>
 and sets the AF state to INACTIVE.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MACRO</span>
+                    <span class="entry_type_enum_name">MACRO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Close-up focusing mode.<wbr/></p>
 <p>In this mode,<wbr/> the lens does not move unless the
 autofocus trigger action is called.<wbr/> When that trigger is
@@ -6776,7 +7125,7 @@ position to default,<wbr/> and sets the AF state to
 INACTIVE.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO</span>
+                    <span class="entry_type_enum_name">CONTINUOUS_VIDEO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
 position continually to attempt to provide a
 constantly-in-focus image stream.<wbr/></p>
@@ -6796,7 +7145,7 @@ ongoing PASSIVE_<wbr/>SCAN must immediately be
 canceled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE</span>
+                    <span class="entry_type_enum_name">CONTINUOUS_PICTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>In this mode,<wbr/> the AF algorithm modifies the lens
 position continually to attempt to provide a
 constantly-in-focus image stream.<wbr/></p>
@@ -6815,7 +7164,7 @@ should transition back to INACTIVE and then act as if it
 has just been started.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">EDOF</span>
+                    <span class="entry_type_enum_name">EDOF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Extended depth of field (digital focus) mode.<wbr/></p>
 <p>The camera device will produce images with an extended
 depth of field automatically; no special focusing
@@ -6839,6 +7188,10 @@ mode it is set to.<wbr/></p>
               <p><a href="#static_android.control.afAvailableModes">android.<wbr/>control.<wbr/>af<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6847,10 +7200,10 @@ mode it is set to.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> = AUTO and the lens is not fixed focus
 (i.<wbr/>e.<wbr/> <code><a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a> &gt; 0</code>).<wbr/> Also note that
 when <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/> the behavior of AF is device
@@ -6863,10 +7216,10 @@ in result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When afMode is AUTO or MACRO,<wbr/> the lens must not move until an AF trigger is sent in a
 request (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a> <code>==</code> START).<wbr/> After an AF trigger,<wbr/> the afState will end
 up with either FOCUSED_<wbr/>LOCKED or NOT_<wbr/>FOCUSED_<wbr/>LOCKED state (see
@@ -6890,7 +7243,7 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -6928,6 +7281,10 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -6936,10 +7293,10 @@ that will arise on camera modules with open-loop VCMs.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAf">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Af</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of focus areas supported by the device is determined by the value
@@ -6958,7 +7315,8 @@ camera device will add the weights in the overlap region.<wbr/></p>
 is used,<wbr/> all non-zero weights will have the same effect.<wbr/> A region with 0 weight is
 ignored.<wbr/></p>
 <p>If all regions have 0 weight,<wbr/> then no specific metering area needs to be used by the
-camera device.<wbr/></p>
+camera device.<wbr/> The capture result will either be a zero weight region as well,<wbr/> or
+the region selected by the camera device as the focus area of interest.<wbr/></p>
 <p>If the metering region is outside the used <a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a> returned in
 capture result metadata,<wbr/> the camera device will ignore the sections outside the crop
 region and output only the intersection rectangle as the metering region in the result
@@ -6968,10 +7326,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -6981,7 +7339,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7002,15 +7360,15 @@ exclusive on xmax and ymax.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">IDLE</span>
+                    <span class="entry_type_enum_name">IDLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The trigger is idle.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">START</span>
+                    <span class="entry_type_enum_name">START (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Autofocus will trigger now.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANCEL</span>
+                    <span class="entry_type_enum_name">CANCEL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Autofocus will return to its initial
 state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
                   </li>
@@ -7028,6 +7386,10 @@ state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -7036,10 +7398,10 @@ state,<wbr/> and cancel any currently active trigger.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is normally set to IDLE,<wbr/> or is not
 included at all in the request settings.<wbr/></p>
 <p>When included and set to START,<wbr/> the camera device will trigger the
@@ -7061,10 +7423,10 @@ changes to <a href="#dynamic_android.control.afState">android.<wbr/>control.<wbr
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must support triggering the AF trigger while an AE precapture trigger is active
 (and vice versa),<wbr/> or at the same time as the AE trigger.<wbr/>  It is acceptable for the HAL to
 treat these as two consecutive triggers,<wbr/> for example handling the AF trigger and then the
@@ -7073,7 +7435,7 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7094,7 +7456,7 @@ to minimize the latency for converging both focus and exposure/<wbr/>flash usage
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_name">INACTIVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF is off or has not yet tried to scan/<wbr/>been asked
 to scan.<wbr/></p>
 <p>When a camera device is opened,<wbr/> it starts in this
@@ -7103,7 +7465,7 @@ skip reporting this state in capture
 result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PASSIVE_SCAN</span>
+                    <span class="entry_type_enum_name">PASSIVE_SCAN (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF is currently performing an AF scan initiated the
 camera device in a continuous autofocus mode.<wbr/></p>
 <p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
@@ -7111,7 +7473,7 @@ state,<wbr/> the camera device may skip reporting this state in
 capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PASSIVE_FOCUSED</span>
+                    <span class="entry_type_enum_name">PASSIVE_FOCUSED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF currently believes it is in focus,<wbr/> but may
 restart scanning at any time.<wbr/></p>
 <p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient
@@ -7119,7 +7481,7 @@ state,<wbr/> the camera device may skip reporting this state in
 capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ACTIVE_SCAN</span>
+                    <span class="entry_type_enum_name">ACTIVE_SCAN (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF is performing an AF scan because it was
 triggered by AF trigger.<wbr/></p>
 <p>Only used by AUTO or MACRO AF modes.<wbr/> This is a transient
@@ -7127,7 +7489,7 @@ state,<wbr/> the camera device may skip reporting this state in
 capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FOCUSED_LOCKED</span>
+                    <span class="entry_type_enum_name">FOCUSED_LOCKED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF believes it is focused correctly and has locked
 focus.<wbr/></p>
 <p>This state is reached only after an explicit START AF trigger has been
@@ -7136,7 +7498,7 @@ sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>
 a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NOT_FOCUSED_LOCKED</span>
+                    <span class="entry_type_enum_name">NOT_FOCUSED_LOCKED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF has failed to focus successfully and has locked
 focus.<wbr/></p>
 <p>This state is reached only after an explicit START AF trigger has been
@@ -7145,7 +7507,7 @@ sent (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>
 a new AF trigger is sent to the camera device (<a href="#controls_android.control.afTrigger">android.<wbr/>control.<wbr/>af<wbr/>Trigger</a>).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PASSIVE_UNFOCUSED</span>
+                    <span class="entry_type_enum_name">PASSIVE_UNFOCUSED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AF finished a passive scan without finding focus,<wbr/>
 and may restart scanning at any time.<wbr/></p>
 <p>Only used by CONTINUOUS_<wbr/>* AF modes.<wbr/> This is a transient state,<wbr/> the camera
@@ -7167,15 +7529,19 @@ scan has finished,<wbr/> it will always go to PASSIVE_<wbr/>FOCUSED.<wbr/></p></
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Switching between or enabling AF modes (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>) always
 resets the AF state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
 or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
@@ -7560,7 +7926,7 @@ See below table for examples:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7595,15 +7961,19 @@ CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS call</p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Must be 0 if no CAMERA2_<wbr/>TRIGGER_<wbr/>AUTOFOCUS trigger
 received yet by HAL.<wbr/> Always updated even if AF algorithm
 ignores the trigger</p>
@@ -7611,7 +7981,7 @@ ignores the trigger</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7632,13 +8002,13 @@ ignores the trigger</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-white balance lock is disabled; the AWB
 algorithm is free to update its parameters if in AUTO
 mode.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Auto-white balance lock is enabled; the AWB
 algorithm will not update its parameters while the lock
 is active.<wbr/></p></span>
@@ -7658,6 +8028,10 @@ latest calculated values.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -7666,10 +8040,10 @@ latest calculated values.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to <code>true</code> (ON),<wbr/> the AWB algorithm is locked to its latest parameters,<wbr/>
 and will not change color balance settings until the lock is set to <code>false</code> (OFF).<wbr/></p>
 <p>Since the camera device has a pipeline of in-flight requests,<wbr/> the settings that
@@ -7693,7 +8067,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7714,7 +8088,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled.<wbr/></p>
 <p>The application-selected color transform matrix
 (<a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>) and gains
@@ -7722,7 +8096,7 @@ AWB is already fixed to a specific setting.<wbr/></p>
 device for manual white balance control.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is active.<wbr/></p>
 <p>The application's values for <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>
 and <a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are ignored.<wbr/>
@@ -7731,7 +8105,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">INCANDESCENT</span>
+                    <span class="entry_type_enum_name">INCANDESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses incandescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7745,7 +8119,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_name">FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses fluorescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7759,7 +8133,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">WARM_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">WARM_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses warm fluorescent light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7773,7 +8147,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_name">DAYLIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses daylight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7787,7 +8161,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT</span>
+                    <span class="entry_type_enum_name">CLOUDY_DAYLIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses cloudy daylight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7798,7 +8172,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TWILIGHT</span>
+                    <span class="entry_type_enum_name">TWILIGHT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses twilight light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7809,7 +8183,7 @@ values used by the camera device for the transform and gains
 will be available in the capture result for this request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_name">SHADE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device's auto-white balance routine is disabled;
 the camera device uses shade light as the assumed scene
 illumination for white balance.<wbr/></p>
@@ -7836,6 +8210,10 @@ is.<wbr/></p>
               <p><a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -7844,10 +8222,10 @@ is.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective if <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is AUTO.<wbr/></p>
 <p>When set to the ON mode,<wbr/> the camera device's auto-white balance
 routine is enabled,<wbr/> overriding the application's selected
@@ -7871,7 +8249,7 @@ adjustment.<wbr/> The application's values for
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7910,6 +8288,10 @@ estimation.<wbr/></p>
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -7918,10 +8300,10 @@ estimation.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Not available if <a href="#static_android.control.maxRegionsAwb">android.<wbr/>control.<wbr/>max<wbr/>Regions<wbr/>Awb</a> is 0.<wbr/>
 Otherwise will always be present.<wbr/></p>
 <p>The maximum number of regions supported by the device is determined by the value
@@ -7950,10 +8332,10 @@ not reported in the result metadata.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL level representation of MeteringRectangle[] is a
 int[5 * area_<wbr/>count].<wbr/>
 Every five elements represent a metering region of
@@ -7963,7 +8345,7 @@ exclusive on xmax and ymax.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -7984,30 +8366,30 @@ exclusive on xmax and ymax.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CUSTOM</span>
+                    <span class="entry_type_enum_name">CUSTOM (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The goal of this request doesn't fall into the other
 categories.<wbr/> The camera device will default to preview-like
 behavior.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PREVIEW</span>
+                    <span class="entry_type_enum_name">PREVIEW (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a preview-like use case.<wbr/></p>
 <p>The precapture trigger may be used to start off a metering
 w/<wbr/>flash sequence.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STILL_CAPTURE</span>
+                    <span class="entry_type_enum_name">STILL_CAPTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a still capture-type
 use case.<wbr/></p>
 <p>If the flash unit is under automatic control,<wbr/> it may fire as needed.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">VIDEO_RECORD</span>
+                    <span class="entry_type_enum_name">VIDEO_RECORD (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a video recording
 use case.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT</span>
+                    <span class="entry_type_enum_name">VIDEO_SNAPSHOT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a video snapshot (still
 image while recording video) use case.<wbr/></p>
 <p>The camera device should take the highest-quality image
@@ -8015,19 +8397,27 @@ possible (given the other settings) without disrupting the
 frame rate of video recording.<wbr/>  </p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for a ZSL usecase; the
 application will stream full-resolution images and
 reprocess one or several later for a final
 capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MANUAL</span>
+                    <span class="entry_type_enum_name">MANUAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This request is for manual capture use case where
 the applications want to directly control the capture parameters.<wbr/></p>
 <p>For example,<wbr/> the application may wish to manually control
 <a href="#controls_android.sensor.exposureTime">android.<wbr/>sensor.<wbr/>exposure<wbr/>Time</a>,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>,<wbr/> etc.<wbr/></p></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">MOTION_TRACKING (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>This request is for a motion tracking use case,<wbr/> where
+the application will use camera and inertial sensor data to
+locate and track objects in the world.<wbr/></p>
+<p>The camera device auto-exposure routine will limit the exposure time
+of the camera to no more than 20 milliseconds,<wbr/> to minimize motion blur.<wbr/></p></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -8045,6 +8435,10 @@ strategy.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -8053,21 +8447,24 @@ strategy.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control (except for MANUAL) is only effective if
 <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> != OFF</code> and any 3A routine is active.<wbr/></p>
-<p>ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
-contains PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/> MANUAL will be supported if
-<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains MANUAL_<wbr/>SENSOR.<wbr/> Other intent values are
-always supported.<wbr/></p>
+<p>All intents are supported by all devices,<wbr/> except that:
+  * ZERO_<wbr/>SHUTTER_<wbr/>LAG will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+PRIVATE_<wbr/>REPROCESSING or YUV_<wbr/>REPROCESSING.<wbr/>
+  * MANUAL will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+MANUAL_<wbr/>SENSOR.<wbr/>
+  * MOTION_<wbr/>TRACKING will be supported if <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
+MOTION_<wbr/>TRACKING.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8088,7 +8485,7 @@ always supported.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">INACTIVE</span>
+                    <span class="entry_type_enum_name">INACTIVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AWB is not in auto mode,<wbr/> or has not yet started metering.<wbr/></p>
 <p>When a camera device is opened,<wbr/> it starts in this
 state.<wbr/> This is a transient state,<wbr/> the camera device may
@@ -8096,19 +8493,19 @@ skip reporting this state in capture
 result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SEARCHING</span>
+                    <span class="entry_type_enum_name">SEARCHING (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AWB doesn't yet have a good set of control
 values for the current scene.<wbr/></p>
 <p>This is a transient state,<wbr/> the camera device
 may skip reporting this state in capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONVERGED</span>
+                    <span class="entry_type_enum_name">CONVERGED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AWB has a good set of control values for the
 current scene.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">LOCKED</span>
+                    <span class="entry_type_enum_name">LOCKED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>AWB has been locked.<wbr/></p></span>
                   </li>
                 </ul>
@@ -8125,15 +8522,19 @@ current scene.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Switching between or enabling AWB modes (<a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>) always
 resets the AWB state to INACTIVE.<wbr/> Similarly,<wbr/> switching between <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a>,<wbr/>
 or <a href="#controls_android.control.sceneMode">android.<wbr/>control.<wbr/>scene<wbr/>Mode</a> if <code><a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> == USE_<wbr/>SCENE_<wbr/>MODE</code> resets all
@@ -8253,7 +8654,7 @@ transient states between two results.<wbr/> See below table for examples:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8274,56 +8675,56 @@ transient states between two results.<wbr/> See below table for examples:</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No color effect will be applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MONO</span>
+                    <span class="entry_type_enum_name">MONO (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "monocolor" effect where the image is mapped into
 a single color.<wbr/></p>
 <p>This will typically be grayscale.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NEGATIVE</span>
+                    <span class="entry_type_enum_name">NEGATIVE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "photo-negative" effect where the image's colors
 are inverted.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SOLARIZE</span>
+                    <span class="entry_type_enum_name">SOLARIZE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "solarisation" effect (Sabattier effect) where the
 image is wholly or partially reversed in
 tone.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SEPIA</span>
+                    <span class="entry_type_enum_name">SEPIA (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "sepia" effect where the image is mapped into warm
 gray,<wbr/> red,<wbr/> and brown tones.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">POSTERIZE</span>
+                    <span class="entry_type_enum_name">POSTERIZE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "posterization" effect where the image uses
 discrete regions of tone rather than a continuous
 gradient of tones.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">WHITEBOARD</span>
+                    <span class="entry_type_enum_name">WHITEBOARD (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "whiteboard" effect where the image is typically displayed
 as regions of white,<wbr/> with black or grey details.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BLACKBOARD</span>
+                    <span class="entry_type_enum_name">BLACKBOARD (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>A "blackboard" effect where the image is typically displayed
 as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AQUA</span>
+                    <span class="entry_type_enum_name">AQUA (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>An "aqua" effect where a blue hue is added to the image.<wbr/></p></span>
                   </li>
@@ -8342,6 +8743,10 @@ as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
               <p><a href="#static_android.control.availableEffects">android.<wbr/>control.<wbr/>available<wbr/>Effects</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -8350,10 +8755,10 @@ as regions of black,<wbr/> with white or grey details.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When this mode is set,<wbr/> a color effect will be applied
 to images produced by the camera device.<wbr/> The interpretation
 and implementation of these color effects is left to the
@@ -8364,7 +8769,7 @@ devices.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8385,7 +8790,7 @@ devices.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Full application control of pipeline.<wbr/></p>
 <p>All control by the device's metering and focusing (3A)
 routines is disabled,<wbr/> and no other settings in
@@ -8400,14 +8805,14 @@ when control is switched to AUTO mode,<wbr/> good control values
 can be immediately applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">AUTO</span>
+                    <span class="entry_type_enum_name">AUTO (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use settings for each individual 3A routine.<wbr/></p>
 <p>Manual control of capture parameters is disabled.<wbr/> All
 controls in android.<wbr/>control.<wbr/>* besides sceneMode take
 effect.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">USE_SCENE_MODE</span>
+                    <span class="entry_type_enum_name">USE_SCENE_MODE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Use a specific scene mode.<wbr/></p>
 <p>Enabling this disables control.<wbr/>aeMode,<wbr/> control.<wbr/>awbMode and
@@ -8419,7 +8824,7 @@ This setting can only be used if scene mode is supported (i.<wbr/>e.<wbr/>
 contain some modes other than DISABLED).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">OFF_KEEP_STATE</span>
+                    <span class="entry_type_enum_name">OFF_KEEP_STATE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Same as OFF mode,<wbr/> except that this capture will not be
 used by camera device background auto-exposure,<wbr/> auto-white balance and
@@ -8446,6 +8851,10 @@ routines.<wbr/></p>
               <p><a href="#static_android.control.availableModes">android.<wbr/>control.<wbr/>available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -8454,20 +8863,20 @@ routines.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is a top-level 3A control switch.<wbr/> When set to OFF,<wbr/> all 3A control
 by the camera device is disabled.<wbr/> The application must set the fields for
 capture parameters itself.<wbr/></p>
 <p>When set to AUTO,<wbr/> the individual algorithm controls in
 android.<wbr/>control.<wbr/>* are in effect,<wbr/> such as <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a>.<wbr/></p>
 <p>When set to USE_<wbr/>SCENE_<wbr/>MODE,<wbr/> the individual controls in
-android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device implements
-one of the scene mode settings (such as ACTION,<wbr/> SUNSET,<wbr/> or PARTY)
-as it wishes.<wbr/> The camera device scene mode 3A settings are provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
+android.<wbr/>control.<wbr/>* are mostly disabled,<wbr/> and the camera device
+implements one of the scene mode settings (such as ACTION,<wbr/>
+SUNSET,<wbr/> or PARTY) as it wishes.<wbr/> The camera device scene mode
+3A settings are provided by <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">capture results</a>.<wbr/></p>
 <p>When set to OFF_<wbr/>KEEP_<wbr/>STATE,<wbr/> it is similar to OFF mode,<wbr/> the only difference
 is that this frame will not be used by camera device background 3A statistics
 update,<wbr/> as if this frame is never captured.<wbr/> This mode can be used in the scenario
@@ -8477,7 +8886,7 @@ the subsequent auto 3A capture results.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8498,12 +8907,12 @@ the subsequent auto 3A capture results.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">DISABLED</span>
+                    <span class="entry_type_enum_name">DISABLED (v3.2)</span>
                     <span class="entry_type_enum_value">0</span>
                     <span class="entry_type_enum_notes"><p>Indicates that no scene modes are set for a given capture request.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FACE_PRIORITY</span>
+                    <span class="entry_type_enum_name">FACE_PRIORITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>If face detection support exists,<wbr/> use face
 detection data for auto-focus,<wbr/> auto-white balance,<wbr/> and
 auto-exposure routines.<wbr/></p>
@@ -8516,91 +8925,91 @@ face detection statistics to the framework).<wbr/></p>
 remain active when FACE_<wbr/>PRIORITY is set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ACTION</span>
+                    <span class="entry_type_enum_name">ACTION (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving objects.<wbr/></p>
 <p>Similar to SPORTS.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PORTRAIT</span>
+                    <span class="entry_type_enum_name">PORTRAIT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for still photos of people.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">LANDSCAPE</span>
+                    <span class="entry_type_enum_name">LANDSCAPE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of distant macroscopic objects.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NIGHT</span>
+                    <span class="entry_type_enum_name">NIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for low-light settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">NIGHT_PORTRAIT</span>
+                    <span class="entry_type_enum_name">NIGHT_PORTRAIT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for still photos of people in low-light
 settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">THEATRE</span>
+                    <span class="entry_type_enum_name">THEATRE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings where flash must
 remain off.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BEACH</span>
+                    <span class="entry_type_enum_name">BEACH (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor beach settings.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SNOW</span>
+                    <span class="entry_type_enum_name">SNOW (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for bright,<wbr/> outdoor settings containing snow.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SUNSET</span>
+                    <span class="entry_type_enum_name">SUNSET (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for scenes of the setting sun.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STEADYPHOTO</span>
+                    <span class="entry_type_enum_name">STEADYPHOTO (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized to avoid blurry photos due to small amounts of
 device motion (for example: due to hand shake).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FIREWORKS</span>
+                    <span class="entry_type_enum_name">FIREWORKS (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for nighttime photos of fireworks.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SPORTS</span>
+                    <span class="entry_type_enum_name">SPORTS (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for photos of quickly moving people.<wbr/></p>
 <p>Similar to ACTION.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PARTY</span>
+                    <span class="entry_type_enum_name">PARTY (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim,<wbr/> indoor settings with multiple moving
 people.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CANDLELIGHT</span>
+                    <span class="entry_type_enum_name">CANDLELIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for dim settings where the main light source
 is a flame.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BARCODE</span>
+                    <span class="entry_type_enum_name">BARCODE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optimized for accurately capturing a photo of barcode
 for use by camera applications that wish to read the
 barcode value.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_name">HIGH_SPEED_VIDEO (v3.2)</span>
                     <span class="entry_type_enum_deprecated">[deprecated]</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>This is deprecated,<wbr/> please use <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
@@ -8665,7 +9074,7 @@ reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommend
 the application avoids unnecessary scene mode switch as much as possible.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HDR</span>
+                    <span class="entry_type_enum_name">HDR (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Turn on a device-specific high dynamic range (HDR) mode.<wbr/></p>
 <p>In this scene mode,<wbr/> the camera device captures images
@@ -8710,7 +9119,7 @@ or capture intents,<wbr/> the images captured will be as if
 the SCENE_<wbr/>MODE was not enabled at all.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT</span>
+                    <span class="entry_type_enum_name">FACE_PRIORITY_LOW_LIGHT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_notes"><p>Same as FACE_<wbr/>PRIORITY scene mode,<wbr/> except that the camera
@@ -8736,7 +9145,7 @@ reducing the noise level of the captured images.<wbr/></p>
 remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DEVICE_CUSTOM_START</span>
+                    <span class="entry_type_enum_name">DEVICE_CUSTOM_START (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_value">100</span>
@@ -8745,7 +9154,7 @@ remain active when FACE_<wbr/>PRIORITY_<wbr/>LOW_<wbr/>LIGHT is set.<wbr/></p></
 customized scene modes.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DEVICE_CUSTOM_END</span>
+                    <span class="entry_type_enum_name">DEVICE_CUSTOM_END (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_hidden">[hidden]</span>
                     <span class="entry_type_enum_value">127</span>
@@ -8768,6 +9177,10 @@ customized scene modes.<wbr/></p></span>
               <p><a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -8776,10 +9189,10 @@ customized scene modes.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Scene modes are custom camera modes optimized for a certain set of conditions and
 capture settings.<wbr/></p>
 <p>This is the mode that that is active when
@@ -8794,27 +9207,27 @@ a subset of these modes.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL implementations that include scene modes are expected to provide
 the per-scene settings to use for <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a>,<wbr/>
 <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a>,<wbr/> and <a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> in
 <a href="#static_android.control.sceneModeOverrides">android.<wbr/>control.<wbr/>scene<wbr/>Mode<wbr/>Overrides</a>.<wbr/></p>
-<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/>
-the HAL must list supported video size and fps range in
-<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>
-1280x720,<wbr/> if the HAL has two different sensor configurations for normal streaming
-mode and high speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture
-requests,<wbr/> the HAL may have to switch between different sensor modes.<wbr/>
-This mode is deprecated in HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
+<p>For HIGH_<wbr/>SPEED_<wbr/>VIDEO mode,<wbr/> if it is included in <a href="#static_android.control.availableSceneModes">android.<wbr/>control.<wbr/>available<wbr/>Scene<wbr/>Modes</a>,<wbr/> the
+HAL must list supported video size and fps range in
+<a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a>.<wbr/> For a given size,<wbr/> e.<wbr/>g.<wbr/>  1280x720,<wbr/>
+if the HAL has two different sensor configurations for normal streaming mode and high
+speed streaming,<wbr/> when this scene mode is set/<wbr/>reset in a sequence of capture requests,<wbr/> the
+HAL may have to switch between different sensor modes.<wbr/>  This mode is deprecated in legacy
+HAL3.<wbr/>3,<wbr/> to support high speed video recording,<wbr/> please implement
 <a href="#static_android.control.availableHighSpeedVideoConfigurations">android.<wbr/>control.<wbr/>available<wbr/>High<wbr/>Speed<wbr/>Video<wbr/>Configurations</a> and CONSTRAINED_<wbr/>HIGH_<wbr/>SPEED_<wbr/>VIDEO
 capbility defined in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8835,11 +9248,11 @@ capbility defined in <a href="#static_android.request.availableCapabilities">and
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Video stabilization is disabled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Video stabilization is enabled.<wbr/></p></span>
                   </li>
                 </ul>
@@ -8857,6 +9270,10 @@ active.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -8865,10 +9282,10 @@ active.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Video stabilization automatically warps images from
 the camera in order to stabilize motion between consecutive frames.<wbr/></p>
 <p>If enabled,<wbr/> video stabilization can modify the
@@ -8896,7 +9313,7 @@ both at the same time.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8930,15 +9347,19 @@ after RAW sensor data is captured.<wbr/></p>
               <p><a href="#static_android.control.postRawSensitivityBoostRange">android.<wbr/>control.<wbr/>post<wbr/>Raw<wbr/>Sensitivity<wbr/>Boost<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Some camera devices support additional digital sensitivity boosting in the
 camera processing pipeline after sensor RAW image is captured.<wbr/>
 Such a boost will be applied to YUV/<wbr/>JPEG format output images but will not
@@ -8960,7 +9381,7 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -8980,12 +9401,12 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Requests with <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE must be captured
 after previous requests.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Requests with <a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a> == STILL_<wbr/>CAPTURE may or may not be
 captured before previous requests.<wbr/></p></span>
                   </li>
@@ -9004,15 +9425,19 @@ captured before previous requests.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If enableZsl is <code>true</code>,<wbr/> the camera device may enable zero-shutter-lag mode for requests with
 STILL_<wbr/>CAPTURE capture intent.<wbr/> The camera device may use images captured in the past to
 produce output images for a zero-shutter-lag request.<wbr/> The result metadata including the
@@ -9038,16 +9463,81 @@ capture templates is always <code>false</code> if present.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>It is valid for HAL to produce regular output images for requests with STILL_<wbr/>CAPTURE
 capture intent.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.control.afSceneChange">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>control.<wbr/>af<wbr/>Scene<wbr/>Change
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">NOT_DETECTED (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Scene change is not detected within the AF region(s).<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">DETECTED (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Scene change is detected within the AF region(s).<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Whether a significant scene change is detected within the currently-set AF
+region(s).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>When the camera focus routine detects a change in the scene it is looking at,<wbr/>
+such as a large shift in camera viewpoint,<wbr/> significant motion in the scene,<wbr/> or a
+significant illumination change,<wbr/> this value will be set to DETECTED for a single capture
+result.<wbr/> Otherwise the value will be NOT_<wbr/>DETECTED.<wbr/> The threshold for detection is similar
+to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.<wbr/></p>
+<p>This key will be available if the camera device advertises this key via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -9056,10 +9546,10 @@ capture intent.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_demosaic" class="section">demosaic</td></tr>
+  <tr><td colspan="7" id="section_demosaic" class="section">demosaic</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9068,6 +9558,7 @@ capture intent.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9099,12 +9590,12 @@ capture intent.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Minimal or no slowdown of frame rate compared to
 Bayer RAW output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Improved processing quality but the frame rate might be slowed down
 relative to raw output.<wbr/></p></span>
                   </li>
@@ -9123,6 +9614,10 @@ processing.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9132,7 +9627,7 @@ processing.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -9141,10 +9636,10 @@ processing.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_edge" class="section">edge</td></tr>
+  <tr><td colspan="7" id="section_edge" class="section">edge</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9153,6 +9648,7 @@ processing.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9185,28 +9681,29 @@ processing.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
 relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
 slow down frame rate relative to sensor.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
-                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
-edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
-applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
-frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
-is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
+                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different
+levels for different output streams,<wbr/> based on resolution.<wbr/> Streams at maximum recording
+resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>)
+or below have edge enhancement applied,<wbr/> while higher-resolution streams have no edge
+enhancement applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned
+so that frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST
+(since it is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
 <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
 with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
 high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
@@ -9236,7 +9733,11 @@ enhancement.<wbr/></p>
               <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
             </td>
 
-            <td class="entry_tags">
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -9245,10 +9746,10 @@ enhancement.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
 no enhancement will be applied by the camera device.<wbr/></p>
 <p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
@@ -9272,17 +9773,17 @@ image quality based on the <a href="#controls_android.reprocess.effectiveExposur
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For YUV_<wbr/>REPROCESSING The HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
 adjust the internal edge enhancement reduction parameters appropriately to get the best
 quality images.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -9315,6 +9816,10 @@ applied to the images</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9324,14 +9829,14 @@ applied to the images</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9340,6 +9845,7 @@ applied to the images</p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9390,6 +9896,10 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -9399,10 +9909,10 @@ device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Full-capability camera devices must always support OFF; camera devices that support
 YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will list ZERO_<wbr/>SHUTTER_<wbr/>LAG; all devices will
 list FAST.<wbr/></p>
@@ -9410,10 +9920,10 @@ list FAST.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if edge enhancement control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -9421,14 +9931,14 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9437,6 +9947,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9469,28 +9980,29 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No edge enhancement is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply edge enhancement at a quality level that does not slow down frame rate
 relative to sensor output.<wbr/> It may be the same as OFF if edge enhancement will
 slow down frame rate relative to sensor.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply high-quality edge enhancement,<wbr/> at a cost of possibly reduced output frame rate.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
-                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have
-edge enhancement applied,<wbr/> while higher-resolution streams have no edge enhancement
-applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned so that
-frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST (since it
-is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
+                    <span class="entry_type_enum_notes"><p>Edge enhancement is applied at different
+levels for different output streams,<wbr/> based on resolution.<wbr/> Streams at maximum recording
+resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>)
+or below have edge enhancement applied,<wbr/> while higher-resolution streams have no edge
+enhancement applied.<wbr/> The level of edge enhancement for low-resolution streams is tuned
+so that frame rate is not impacted,<wbr/> and the quality is equal to or better than FAST
+(since it is only applied to lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
 <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
 with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
 high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
@@ -9520,6 +10032,10 @@ enhancement.<wbr/></p>
               <p><a href="#static_android.edge.availableEdgeModes">android.<wbr/>edge.<wbr/>available<wbr/>Edge<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -9529,10 +10045,10 @@ enhancement.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Edge enhancement improves sharpness and details in the captured image.<wbr/> OFF means
 no enhancement will be applied by the camera device.<wbr/></p>
 <p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined enhancement
@@ -9556,17 +10072,17 @@ image quality based on the <a href="#controls_android.reprocess.effectiveExposur
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For YUV_<wbr/>REPROCESSING The HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
 adjust the internal edge enhancement reduction parameters appropriately to get the best
 quality images.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -9575,10 +10091,10 @@ quality images.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_flash" class="section">flash</td></tr>
+  <tr><td colspan="7" id="section_flash" class="section">flash</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9587,6 +10103,7 @@ quality images.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9631,6 +10148,10 @@ quality images.<wbr/></p>
               <p>0 - 10</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9639,10 +10160,10 @@ quality images.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Power for snapshot may use a different scale than
 for torch mode.<wbr/> Only one entry for torch mode will be
 used</p>
@@ -9650,7 +10171,7 @@ used</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -9684,6 +10205,10 @@ exposure</p>
               <p>0-(exposure time-flash duration)</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9692,17 +10217,17 @@ exposure</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Clamped to (0,<wbr/> exposure time - flash
 duration).<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -9723,16 +10248,16 @@ duration).<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SINGLE</span>
+                    <span class="entry_type_enum_name">SINGLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
 for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TORCH</span>
+                    <span class="entry_type_enum_name">TORCH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
                   </li>
                 </ul>
@@ -9749,6 +10274,10 @@ for this capture.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -9757,10 +10286,10 @@ for this capture.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective when flash unit is available
 (<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
 <p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
@@ -9778,14 +10307,14 @@ for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -9794,6 +10323,7 @@ for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -9828,10 +10358,10 @@ for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                   </li>
                 </ul>
 
@@ -9848,6 +10378,10 @@ flash unit.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -9856,10 +10390,10 @@ flash unit.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Will be <code>false</code> if no flash is available.<wbr/></p>
 <p>If there is no flash unit,<wbr/> none of the flash controls do
 anything.<wbr/></p>
@@ -9867,7 +10401,7 @@ anything.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -9901,6 +10435,10 @@ again</p>
               <p>0-1e9</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9909,17 +10447,17 @@ again</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>1 second too long/<wbr/>too short for recharge? Should
 this be power-dependent?</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -9956,6 +10494,10 @@ flash</p>
               <p>0-1 for both</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -9965,7 +10507,7 @@ flash</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -9999,6 +10541,10 @@ power single flash</p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -10008,14 +10554,14 @@ power single flash</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -10024,6 +10570,7 @@ power single flash</p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -10068,6 +10615,10 @@ power single flash</p>
               <p>0 - 10</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -10076,10 +10627,10 @@ power single flash</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Power for snapshot may use a different scale than
 for torch mode.<wbr/> Only one entry for torch mode will be
 used</p>
@@ -10087,7 +10638,7 @@ used</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10121,6 +10672,10 @@ exposure</p>
               <p>0-(exposure time-flash duration)</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -10129,17 +10684,17 @@ exposure</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Clamped to (0,<wbr/> exposure time - flash
 duration).<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10160,16 +10715,16 @@ duration).<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not fire the flash for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SINGLE</span>
+                    <span class="entry_type_enum_name">SINGLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>If the flash is available and charged,<wbr/> fire flash
 for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TORCH</span>
+                    <span class="entry_type_enum_name">TORCH (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Transition flash to continuously on.<wbr/></p></span>
                   </li>
                 </ul>
@@ -10186,6 +10741,10 @@ for this capture.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10194,10 +10753,10 @@ for this capture.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control is only effective when flash unit is available
 (<code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == true</code>).<wbr/></p>
 <p>When this control is used,<wbr/> the <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> must be set to ON or OFF.<wbr/>
@@ -10215,7 +10774,7 @@ for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10236,23 +10795,23 @@ for use cases such as preview,<wbr/> auto-focus assist,<wbr/> still capture,<wbr
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">UNAVAILABLE</span>
+                    <span class="entry_type_enum_name">UNAVAILABLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No flash on camera.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CHARGING</span>
+                    <span class="entry_type_enum_name">CHARGING (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Flash is charging and cannot be fired.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">READY</span>
+                    <span class="entry_type_enum_name">READY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Flash is ready to fire.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FIRED</span>
+                    <span class="entry_type_enum_name">FIRED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Flash fired for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PARTIAL</span>
+                    <span class="entry_type_enum_name">PARTIAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Flash partially illuminated this frame.<wbr/></p>
 <p>This is usually due to the next or previous frame having
 the flash fire,<wbr/> and the flash spilling into this capture
@@ -10273,15 +10832,19 @@ unit.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When the camera device doesn't have flash unit
 (i.<wbr/>e.<wbr/> <code><a href="#static_android.flash.info.available">android.<wbr/>flash.<wbr/>info.<wbr/>available</a> == false</code>),<wbr/> this state will always be UNAVAILABLE.<wbr/>
 Other states indicate the current flash status.<wbr/></p>
@@ -10299,7 +10862,7 @@ LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -10308,10 +10871,10 @@ LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_hotPixel" class="section">hotPixel</td></tr>
+  <tr><td colspan="7" id="section_hotPixel" class="section">hotPixel</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -10320,6 +10883,7 @@ LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -10351,20 +10915,20 @@ LEGACY devices (i.<wbr/>e.<wbr/> it will be <code>null</code>).<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
 <p>The frame rate must not be reduced relative to sensor raw output
 for this option.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
 rate relative to sensor raw output.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
 of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
@@ -10384,6 +10948,10 @@ of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
               <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -10393,10 +10961,10 @@ of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
 that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
 are stuck at an arbitrary value or are oversensitive).<wbr/></p>
@@ -10404,14 +10972,14 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -10420,6 +10988,7 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -10469,6 +11038,10 @@ camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.hotPixel.mode">android.<wbr/>hot<wbr/>Pixel.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -10478,19 +11051,19 @@ camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>FULL mode camera devices will always support FAST.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>To avoid performance issues,<wbr/> there will be significantly fewer hot
 pixels than actual pixels on the camera sensor.<wbr/>
 HAL must support both FAST and HIGH_<wbr/>QUALITY if hot pixel correction control is available
@@ -10500,14 +11073,14 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -10516,6 +11089,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -10547,20 +11121,20 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No hot pixel correction is applied.<wbr/></p>
 <p>The frame rate must not be reduced relative to sensor raw output
 for this option.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel correction is applied,<wbr/> without reducing frame
 rate relative to sensor raw output.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality hot pixel correction is applied,<wbr/> at a cost
 of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
 <p>The hotpixel map may be returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/></p></span>
@@ -10580,6 +11154,10 @@ of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
               <p><a href="#static_android.hotPixel.availableHotPixelModes">android.<wbr/>hot<wbr/>Pixel.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -10589,10 +11167,10 @@ of possibly reduced frame rate relative to sensor raw output.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Hotpixel correction interpolates out,<wbr/> or otherwise removes,<wbr/> pixels
 that do not accurately measure the incoming light (i.<wbr/>e.<wbr/> pixels that
 are stuck at an arbitrary value or are oversensitive).<wbr/></p>
@@ -10600,7 +11178,7 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -10609,10 +11187,10 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_jpeg" class="section">jpeg</td></tr>
+  <tr><td colspan="7" id="section_jpeg" class="section">jpeg</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -10621,6 +11199,7 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -10665,15 +11244,19 @@ are stuck at an arbitrary value or are oversensitive).<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Setting a location object in a request will include the GPS coordinates of the location
 into any JPEG images captured based on the request.<wbr/> These coordinates can then be
 viewed by anyone who receives the JPEG image.<wbr/></p>
@@ -10681,7 +11264,7 @@ viewed by anyone who receives the JPEG image.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10720,6 +11303,10 @@ EXIF.<wbr/></p>
               <p>(-180 - 180],<wbr/> [-90,<wbr/>90],<wbr/> [-inf,<wbr/> inf]</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10729,7 +11316,7 @@ EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10763,6 +11350,10 @@ include in EXIF.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10772,7 +11363,7 @@ include in EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10806,6 +11397,10 @@ EXIF.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10815,7 +11410,7 @@ EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10849,6 +11444,10 @@ EXIF.<wbr/></p>
               <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10857,10 +11456,10 @@ EXIF.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
 to the camera,<wbr/> that the JPEG picture needs to be rotated by,<wbr/> to be viewed
 upright.<wbr/></p>
@@ -10869,8 +11468,8 @@ rotate the image data to match this orientation.<wbr/> When the image data is ro
 the thumbnail data will also be rotated.<wbr/></p>
 <p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
 by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
-<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
-sample code may be used:</p>
+<p>To translate from the device orientation given by the Android sensor APIs for camera
+sensors which are not EXTERNAL,<wbr/> the following sample code may be used:</p>
 <pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
     if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
     int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
@@ -10889,11 +11488,13 @@ sample code may be used:</p>
     return jpegOrientation;
 }
 </code></pre>
+<p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
+also be set to EXTERNAL.<wbr/> The above code is not relevant in such case.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10927,6 +11528,10 @@ image.<wbr/></p>
               <p>1-100; larger is higher quality</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10935,16 +11540,16 @@ image.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>85-95 is typical usage range.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -10978,6 +11583,10 @@ thumbnail.<wbr/></p>
               <p>1-100; larger is higher quality</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -10987,7 +11596,7 @@ thumbnail.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11024,6 +11633,10 @@ thumbnail.<wbr/></p>
               <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11032,10 +11645,10 @@ thumbnail.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to (0,<wbr/> 0) value,<wbr/> the JPEG EXIF will not contain thumbnail,<wbr/>
 but the captured JPEG will still be a valid image.<wbr/></p>
 <p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
@@ -11062,10 +11675,10 @@ the camera device will handle thumbnail rotation in one of the following ways:</
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.<wbr/>
 The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
 The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
@@ -11073,14 +11686,14 @@ thumbnail image cropping.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -11089,6 +11702,7 @@ thumbnail image cropping.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -11137,6 +11751,10 @@ camera device.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11145,10 +11763,10 @@ camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This list will include at least one non-zero resolution,<wbr/> plus <code>(0,<wbr/>0)</code> for indicating no
 thumbnail should be generated.<wbr/></p>
 <p>Below condiditions will be satisfied for this size list:</p>
@@ -11168,7 +11786,7 @@ and vice versa.<wbr/></li>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11202,29 +11820,33 @@ JPEG buffer</p>
 the camera</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is used for sizing the gralloc buffers for
 JPEG</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -11233,6 +11855,7 @@ JPEG</p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -11277,15 +11900,19 @@ JPEG</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Setting a location object in a request will include the GPS coordinates of the location
 into any JPEG images captured based on the request.<wbr/> These coordinates can then be
 viewed by anyone who receives the JPEG image.<wbr/></p>
@@ -11293,7 +11920,7 @@ viewed by anyone who receives the JPEG image.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11332,6 +11959,10 @@ EXIF.<wbr/></p>
               <p>(-180 - 180],<wbr/> [-90,<wbr/>90],<wbr/> [-inf,<wbr/> inf]</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11341,7 +11972,7 @@ EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11375,6 +12006,10 @@ include in EXIF.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11384,7 +12019,7 @@ include in EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11418,6 +12053,10 @@ EXIF.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11427,7 +12066,7 @@ EXIF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11461,6 +12100,10 @@ EXIF.<wbr/></p>
               <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11469,10 +12112,10 @@ EXIF.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The clockwise rotation angle in degrees,<wbr/> relative to the orientation
 to the camera,<wbr/> that the JPEG picture needs to be rotated by,<wbr/> to be viewed
 upright.<wbr/></p>
@@ -11481,8 +12124,8 @@ rotate the image data to match this orientation.<wbr/> When the image data is ro
 the thumbnail data will also be rotated.<wbr/></p>
 <p>Note that this orientation is relative to the orientation of the camera sensor,<wbr/> given
 by <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a>.<wbr/></p>
-<p>To translate from the device orientation given by the Android sensor APIs,<wbr/> the following
-sample code may be used:</p>
+<p>To translate from the device orientation given by the Android sensor APIs for camera
+sensors which are not EXTERNAL,<wbr/> the following sample code may be used:</p>
 <pre><code>private int getJpegOrientation(CameraCharacteristics c,<wbr/> int deviceOrientation) {
     if (deviceOrientation == android.<wbr/>view.<wbr/>Orientation<wbr/>Event<wbr/>Listener.<wbr/>ORIENTATION_<wbr/>UNKNOWN) return 0;
     int sensorOrientation = c.<wbr/>get(Camera<wbr/>Characteristics.<wbr/>SENSOR_<wbr/>ORIENTATION);
@@ -11501,11 +12144,13 @@ sample code may be used:</p>
     return jpegOrientation;
 }
 </code></pre>
+<p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
+also be set to EXTERNAL.<wbr/> The above code is not relevant in such case.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11539,6 +12184,10 @@ image.<wbr/></p>
               <p>1-100; larger is higher quality</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11547,16 +12196,16 @@ image.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>85-95 is typical usage range.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11589,6 +12238,10 @@ bytes</p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -11597,10 +12250,10 @@ bytes</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If no JPEG output is produced for the request,<wbr/>
 this must be 0.<wbr/></p>
 <p>Otherwise,<wbr/> this describes the real size of the compressed
@@ -11613,7 +12266,7 @@ the JPEG stream will be 1000000 bytes,<wbr/> of which the first
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11647,6 +12300,10 @@ thumbnail.<wbr/></p>
               <p>1-100; larger is higher quality</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11656,7 +12313,7 @@ thumbnail.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11693,6 +12350,10 @@ thumbnail.<wbr/></p>
               <p><a href="#static_android.jpeg.availableThumbnailSizes">android.<wbr/>jpeg.<wbr/>available<wbr/>Thumbnail<wbr/>Sizes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -11701,10 +12362,10 @@ thumbnail.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to (0,<wbr/> 0) value,<wbr/> the JPEG EXIF will not contain thumbnail,<wbr/>
 but the captured JPEG will still be a valid image.<wbr/></p>
 <p>For best results,<wbr/> when issuing a request for a JPEG image,<wbr/> the thumbnail size selected
@@ -11731,10 +12392,10 @@ the camera device will handle thumbnail rotation in one of the following ways:</
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.<wbr/>
 The cropping must be done on the primary jpeg image rather than the sensor active array.<wbr/>
 The stream cropping rule specified by "S5.<wbr/> Cropping" in camera3.<wbr/>h doesn't apply to the
@@ -11742,7 +12403,7 @@ thumbnail image cropping.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -11751,10 +12412,10 @@ thumbnail image cropping.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_lens" class="section">lens</td></tr>
+  <tr><td colspan="7" id="section_lens" class="section">lens</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -11763,6 +12424,7 @@ thumbnail image cropping.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -11809,6 +12471,10 @@ effective aperture diameter.<wbr/></p>
               <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -11817,10 +12483,10 @@ effective aperture diameter.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Setting this value is only supported on the camera devices that have a variable
 aperture lens.<wbr/></p>
 <p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
@@ -11839,7 +12505,7 @@ back to the user in the corresponding result.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11873,6 +12539,10 @@ back to the user in the corresponding result.<wbr/></p>
               <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -11881,10 +12551,10 @@ back to the user in the corresponding result.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control will not be supported on most camera devices.<wbr/></p>
 <p>Lens filters are typically used to lower the amount of light the
 sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
@@ -11901,7 +12571,7 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11935,6 +12605,10 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
               <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -11943,10 +12617,10 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This setting controls the physical focal length of the camera
 device's lens.<wbr/> Changing the focal length changes the field of
 view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
@@ -11960,7 +12634,7 @@ be set to MOVING.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -11995,6 +12669,10 @@ measured from frontmost surface of the lens.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -12004,10 +12682,10 @@ measured from frontmost surface of the lens.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control can be used for setting manual focus,<wbr/> on devices that support
 the MANUAL_<wbr/>SENSOR capability and have a variable-focus lens (see
 <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>).<wbr/></p>
@@ -12023,7 +12701,7 @@ for infinity focus.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12044,11 +12722,11 @@ for infinity focus.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
                   </li>
@@ -12068,6 +12746,10 @@ when capturing images.<wbr/></p>
               <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12076,10 +12758,10 @@ when capturing images.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>OIS is used to compensate for motion blur due to small
 movements of the camera during capture.<wbr/> Unlike digital image
 stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
@@ -12101,14 +12783,14 @@ available controls.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -12117,6 +12799,7 @@ available controls.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -12168,6 +12851,10 @@ supported by this camera device.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12176,10 +12863,10 @@ supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the camera device doesn't support a variable lens aperture,<wbr/>
 this list will contain only one value,<wbr/> which is the fixed aperture size.<wbr/></p>
 <p>If the camera device supports a variable aperture,<wbr/> the aperture values
@@ -12188,7 +12875,7 @@ in this list will be sorted in ascending order.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12227,6 +12914,10 @@ in this list will be sorted in ascending order.<wbr/></p>
               <p>Values are &gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12235,10 +12926,10 @@ in this list will be sorted in ascending order.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If a neutral density filter is not supported by this camera device,<wbr/>
 this list will contain only 0.<wbr/> Otherwise,<wbr/> this list will include every
 filter density supported by the camera device,<wbr/> in ascending order.<wbr/></p>
@@ -12246,7 +12937,7 @@ filter density supported by the camera device,<wbr/> in ascending order.<wbr/></
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12286,6 +12977,10 @@ device.<wbr/></p>
               <p>Values are &gt; 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -12295,10 +12990,10 @@ device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If optical zoom is not supported,<wbr/> this list will only contain
 a single value corresponding to the fixed focal length of the
 device.<wbr/> Otherwise,<wbr/> this list will include every focal length supported
@@ -12307,7 +13002,7 @@ by the camera device,<wbr/> in ascending order.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12346,6 +13041,10 @@ by the camera device,<wbr/> in ascending order.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.lens.opticalStabilizationMode">android.<wbr/>lens.<wbr/>optical<wbr/>Stabilization<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12354,17 +13053,17 @@ by the camera device,<wbr/> in ascending order.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If OIS is not supported by a given camera device,<wbr/> this list will
 contain only OFF.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12399,22 +13098,26 @@ contain only OFF.<wbr/></p>
 within <code>(0.<wbr/>0f,<wbr/> <a href="#static_android.lens.info.minimumFocusDistance">android.<wbr/>lens.<wbr/>info.<wbr/>minimum<wbr/>Focus<wbr/>Distance</a>]</code></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the lens is not fixed focus,<wbr/> the camera device will report this
 field when <a href="#static_android.lens.info.focusDistanceCalibration">android.<wbr/>lens.<wbr/>info.<wbr/>focus<wbr/>Distance<wbr/>Calibration</a> is APPROXIMATE or CALIBRATED.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12449,6 +13152,10 @@ of the lens that can be brought into sharp focus.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12457,20 +13164,20 @@ of the lens that can be brought into sharp focus.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the lens is fixed-focus,<wbr/> this will be
 0.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Mandatory for FULL devices; LIMITED devices
 must always set this value to 0 for fixed-focus; and may omit
 the minimum focus distance otherwise.<wbr/></p>
@@ -12479,7 +13186,7 @@ the MANUAL_<wbr/>SENSOR capability.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12517,6 +13224,10 @@ the MANUAL_<wbr/>SENSOR capability.<wbr/></p>
               <p>Both values &gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12525,17 +13236,17 @@ the MANUAL_<wbr/>SENSOR capability.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The map should be on the order of 30-40 rows and columns,<wbr/> and
 must be smaller than 64x64.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12556,7 +13267,7 @@ must be smaller than 64x64.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">UNCALIBRATED</span>
+                    <span class="entry_type_enum_name">UNCALIBRATED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The lens focus distance is not accurate,<wbr/> and the units used for
 <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a> do not correspond to any physical units.<wbr/></p>
 <p>Setting the lens to the same focus distance on separate occasions may
@@ -12567,7 +13278,7 @@ in the range of <code>[0,<wbr/> <a href="#static_android.lens.info.minimumFocusD
 represents the farthest focus.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">APPROXIMATE</span>
+                    <span class="entry_type_enum_name">APPROXIMATE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters.<wbr/></p>
 <p>However,<wbr/> setting the lens to the same focus distance
 on separate occasions may result in a different real
@@ -12576,7 +13287,7 @@ orientation of the device,<wbr/> the age of the focusing
 mechanism,<wbr/> and the device temperature.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CALIBRATED</span>
+                    <span class="entry_type_enum_name">CALIBRATED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The lens focus distance is measured in diopters,<wbr/> and
 is calibrated.<wbr/></p>
 <p>The lens mechanism is calibrated so that setting the
@@ -12599,6 +13310,10 @@ of best focus.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -12607,10 +13322,10 @@ of best focus.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The lens focus distance calibration quality determines the reliability of
 focus related metadata entries,<wbr/> i.<wbr/>e.<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
 <a href="#dynamic_android.lens.focusRange">android.<wbr/>lens.<wbr/>focus<wbr/>Range</a>,<wbr/> <a href="#static_android.lens.info.hyperfocalDistance">android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance</a>,<wbr/> and
@@ -12628,10 +13343,10 @@ nearest focus the device can achieve.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For devices advertise APPROXIMATE quality or higher,<wbr/> diopters 0 (infinity
 focus) must work.<wbr/> When autofocus is disabled (<a href="#controls_android.control.afMode">android.<wbr/>control.<wbr/>af<wbr/>Mode</a> == OFF)
 and the lens focus distance is set to 0 diopters
@@ -12642,7 +13357,7 @@ the output diopter value should be changing toward 0.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -12666,15 +13381,15 @@ the output diopter value should be changing toward 0.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FRONT</span>
+                    <span class="entry_type_enum_name">FRONT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device faces the same direction as the device's screen.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BACK</span>
+                    <span class="entry_type_enum_name">BACK (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device faces the opposite direction as the device's screen.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">EXTERNAL</span>
+                    <span class="entry_type_enum_name">EXTERNAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device is an external camera,<wbr/> and has no fixed facing relative to the
 device's screen.<wbr/></p></span>
                   </li>
@@ -12693,13 +13408,17 @@ device screen.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12738,6 +13457,10 @@ coordinate system.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -12746,10 +13469,10 @@ coordinate system.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The four coefficients that describe the quaternion
 rotation from the Android sensor coordinate system to a
 camera-aligned coordinate system where the X-axis is
@@ -12780,7 +13503,7 @@ used:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12816,6 +13539,10 @@ used:</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -12824,42 +13551,38 @@ used:</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The position of the camera device's lens optical center,<wbr/>
-as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
-optical center of the largest camera device facing in the
-same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
-axes</a>.<wbr/> Note that only the axis definitions are shared with
-the sensor coordinate system,<wbr/> but not the origin.<wbr/></p>
-<p>If this device is the largest or only camera device with a
-given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
-camera device with a lens optical center located 3 cm from
-the main sensor along the +X axis (to the right from the
-user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
-<p>To transform a pixel coordinates between two cameras
-facing the same direction,<wbr/> first the source camera
-<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/>  Then
-the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
-to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
-of the source camera,<wbr/> the translation of the source camera
-relative to the destination camera,<wbr/> the
-<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
-finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
-of the destination camera.<wbr/> This obtains a
-radial-distortion-free coordinate in the destination
-camera pixel coordinates.<wbr/></p>
-<p>To compare this against a real image from the destination
-camera,<wbr/> the destination camera image then needs to be
-corrected for radial distortion before comparison or
-sampling.<wbr/></p>
-            </td>
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>.<wbr/></p>
+<p>Prior to Android P,<wbr/> or when <a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a> is PRIMARY_<wbr/>CAMERA,<wbr/> this position
+is relative to the optical center of the largest camera device facing in the same
+direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor
+coordinate axes</a>.<wbr/> Note that only the axis definitions are shared with the sensor
+coordinate system,<wbr/> but not the origin.<wbr/></p>
+<p>If this device is the largest or only camera device with a given facing,<wbr/> then this
+position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a camera device with a lens optical center located 3 cm
+from the main sensor along the +X axis (to the right from the user's perspective) will
+report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
+<p>To transform a pixel coordinates between two cameras facing the same direction,<wbr/> first
+the source camera <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> must be corrected for.<wbr/>  Then the source
+camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs to be applied,<wbr/> followed by the
+<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the source camera,<wbr/> the translation of the source camera
+relative to the destination camera,<wbr/> the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination
+camera,<wbr/> and finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> of the destination
+camera.<wbr/> This obtains a radial-distortion-free coordinate in the destination camera pixel
+coordinates.<wbr/></p>
+<p>To compare this against a real image from the destination camera,<wbr/> the destination camera
+image then needs to be corrected for radial distortion before comparison or sampling.<wbr/></p>
+<p>When <a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a> is GYROSCOPE,<wbr/> then this position is relative to
+the center of the primary gyroscope on the device.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -12900,6 +13623,10 @@ calibration.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -12908,10 +13635,10 @@ calibration.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The five calibration parameters that describe the
 transform from camera-centric 3D coordinates to sensor
 pixel coordinates:</p>
@@ -12949,7 +13676,7 @@ point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>w_<wbr/>s</code> is a m
 where <code>(0,<wbr/>0)</code> is the top-left of the
 preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
 intrinsic calibration transforms have been applied to a
-world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
+world point,<wbr/> then the <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a>
 transform needs to be applied,<wbr/> and the result adjusted to
 be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
 system (where <code>(0,<wbr/> 0)</code> is the top-left of the
@@ -12960,12 +13687,13 @@ output buffers.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
           <tr class="entry" id="static_android.lens.radialDistortion">
             <td class="entry_name
+                entry_name_deprecated
              " rowspan="3">
               android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
             </td>
@@ -12980,6 +13708,7 @@ output buffers.<wbr/></p>
 
 
 
+              <span class="entry_type_deprecated">[deprecated] </span>
 
 
 
@@ -12997,6 +13726,11 @@ radial and tangential lens distortion.<wbr/></p>
             </td>
 
             <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
             </td>
 
             <td class="entry_tags">
@@ -13007,10 +13741,10 @@ radial and tangential lens distortion.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
 kappa_<wbr/>3]</code> and two tangential distortion coefficients
 <code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
@@ -13041,14 +13775,162 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.poseReference">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>pose<wbr/>Reference
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">PRIMARY_CAMERA (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>The value of <a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a> is relative to the optical center of
+the largest camera device facing the same direction as this camera.<wbr/></p>
+<p>This is the default value for API levels before Android P.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">GYROSCOPE (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>The value of <a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a> is relative to the position of the
+primary gyroscope of this Android device.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The origin for <a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a>.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Different calibration methods and use cases can produce better or worse results
+depending on the selected coordinate origin.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.lens.distortion">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>distortion
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The correction coefficients to correct for this camera device's
+radial and tangential lens distortion.<wbr/></p>
+<p>Replaces the deprecated <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> field,<wbr/> which was
+inconsistently defined.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Unitless coefficients.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Three radial distortion coefficients <code>[kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
+kappa_<wbr/>3]</code> and two tangential distortion coefficients
+<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
+lens's geometric distortion with the mapping equations:</p>
+<pre><code> x_<wbr/>c = x_<wbr/>i * ( 1 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
+ y_<wbr/>c = y_<wbr/>i * ( 1 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
+</code></pre>
+<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
+input image that correspond to the pixel values in the
+corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
+<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
+</code></pre>
+<p>The pixel coordinates are defined in a coordinate system
+related to the <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
+calibration fields; see that entry for details of the mapping stages.<wbr/>
+Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code>
+have <code>(0,<wbr/>0)</code> at the lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>,<wbr/> and
+the range of the coordinates depends on the focal length
+terms of the intrinsic calibration.<wbr/></p>
+<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
+optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>.<wbr/></p>
+<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -13057,6 +13939,7 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -13103,6 +13986,10 @@ effective aperture diameter.<wbr/></p>
               <p><a href="#static_android.lens.info.availableApertures">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Apertures</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -13111,10 +13998,10 @@ effective aperture diameter.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Setting this value is only supported on the camera devices that have a variable
 aperture lens.<wbr/></p>
 <p>When this is supported and <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> is OFF,<wbr/>
@@ -13133,7 +14020,7 @@ back to the user in the corresponding result.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13167,6 +14054,10 @@ back to the user in the corresponding result.<wbr/></p>
               <p><a href="#static_android.lens.info.availableFilterDensities">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Filter<wbr/>Densities</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -13175,10 +14066,10 @@ back to the user in the corresponding result.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control will not be supported on most camera devices.<wbr/></p>
 <p>Lens filters are typically used to lower the amount of light the
 sensor is exposed to (measured in steps of EV).<wbr/> As used here,<wbr/> an EV
@@ -13195,7 +14086,7 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13229,6 +14120,10 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
               <p><a href="#static_android.lens.info.availableFocalLengths">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Focal<wbr/>Lengths</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -13237,10 +14132,10 @@ to the requested value.<wbr/> While the filter density is still changing,<wbr/>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This setting controls the physical focal length of the camera
 device's lens.<wbr/> Changing the focal length changes the field of
 view of the camera device,<wbr/> and is usually used for optical zoom.<wbr/></p>
@@ -13254,7 +14149,7 @@ be set to MOVING.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13289,6 +14184,10 @@ measured from frontmost surface of the lens.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -13297,16 +14196,16 @@ measured from frontmost surface of the lens.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Should be zero for fixed-focus cameras</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13347,6 +14246,10 @@ sharp focus (depth of field).<wbr/></p>
               <p>&gt;=0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -13355,17 +14258,17 @@ sharp focus (depth of field).<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If variable focus not supported,<wbr/> can still report
 fixed depth of field range</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13386,11 +14289,11 @@ fixed depth of field range</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Optical stabilization is unavailable.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Optical stabilization is enabled.<wbr/></p></span>
                   </li>
@@ -13410,6 +14313,10 @@ when capturing images.<wbr/></p>
               <p><a href="#static_android.lens.info.availableOpticalStabilization">android.<wbr/>lens.<wbr/>info.<wbr/>available<wbr/>Optical<wbr/>Stabilization</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -13418,10 +14325,10 @@ when capturing images.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>OIS is used to compensate for motion blur due to small
 movements of the camera during capture.<wbr/> Unlike digital image
 stabilization (<a href="#controls_android.control.videoStabilizationMode">android.<wbr/>control.<wbr/>video<wbr/>Stabilization<wbr/>Mode</a>),<wbr/> OIS
@@ -13443,7 +14350,7 @@ available controls.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13464,12 +14371,12 @@ available controls.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">STATIONARY</span>
+                    <span class="entry_type_enum_name">STATIONARY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The lens parameters (<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
 <a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) are not changing.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MOVING</span>
+                    <span class="entry_type_enum_name">MOVING (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>One or several of the lens parameters
 (<a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
 <a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> or <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>) is
@@ -13489,6 +14396,10 @@ currently changing.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -13497,10 +14408,10 @@ currently changing.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For lens parameters <a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a>,<wbr/> <a href="#controls_android.lens.focusDistance">android.<wbr/>lens.<wbr/>focus<wbr/>Distance</a>,<wbr/>
 <a href="#controls_android.lens.filterDensity">android.<wbr/>lens.<wbr/>filter<wbr/>Density</a> and <a href="#controls_android.lens.aperture">android.<wbr/>lens.<wbr/>aperture</a>,<wbr/> when changes are requested,<wbr/>
 they may take several frames to reach the requested values.<wbr/> This state indicates
@@ -13524,7 +14435,7 @@ is changing.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13563,6 +14474,10 @@ coordinate system.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -13571,10 +14486,10 @@ coordinate system.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The four coefficients that describe the quaternion
 rotation from the Android sensor coordinate system to a
 camera-aligned coordinate system where the X-axis is
@@ -13605,7 +14520,7 @@ used:</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13641,6 +14556,10 @@ used:</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -13649,42 +14568,38 @@ used:</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The position of the camera device's lens optical center,<wbr/>
-as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>,<wbr/> relative to the
-optical center of the largest camera device facing in the
-same direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor coordinate
-axes</a>.<wbr/> Note that only the axis definitions are shared with
-the sensor coordinate system,<wbr/> but not the origin.<wbr/></p>
-<p>If this device is the largest or only camera device with a
-given facing,<wbr/> then this position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a
-camera device with a lens optical center located 3 cm from
-the main sensor along the +X axis (to the right from the
-user's perspective) will report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
-<p>To transform a pixel coordinates between two cameras
-facing the same direction,<wbr/> first the source camera
-<a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> must be corrected for.<wbr/>  Then
-the source camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs
-to be applied,<wbr/> followed by the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a>
-of the source camera,<wbr/> the translation of the source camera
-relative to the destination camera,<wbr/> the
-<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination camera,<wbr/> and
-finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
-of the destination camera.<wbr/> This obtains a
-radial-distortion-free coordinate in the destination
-camera pixel coordinates.<wbr/></p>
-<p>To compare this against a real image from the destination
-camera,<wbr/> the destination camera image then needs to be
-corrected for radial distortion before comparison or
-sampling.<wbr/></p>
-            </td>
-          </tr>
-
-
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+as a three-dimensional vector <code>(x,<wbr/>y,<wbr/>z)</code>.<wbr/></p>
+<p>Prior to Android P,<wbr/> or when <a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a> is PRIMARY_<wbr/>CAMERA,<wbr/> this position
+is relative to the optical center of the largest camera device facing in the same
+direction as this camera,<wbr/> in the <a href="https://developer.android.com/reference/android/hardware/SensorEvent.html">Android sensor
+coordinate axes</a>.<wbr/> Note that only the axis definitions are shared with the sensor
+coordinate system,<wbr/> but not the origin.<wbr/></p>
+<p>If this device is the largest or only camera device with a given facing,<wbr/> then this
+position will be <code>(0,<wbr/> 0,<wbr/> 0)</code>; a camera device with a lens optical center located 3 cm
+from the main sensor along the +X axis (to the right from the user's perspective) will
+report <code>(0.<wbr/>03,<wbr/> 0,<wbr/> 0)</code>.<wbr/></p>
+<p>To transform a pixel coordinates between two cameras facing the same direction,<wbr/> first
+the source camera <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> must be corrected for.<wbr/>  Then the source
+camera <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> needs to be applied,<wbr/> followed by the
+<a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the source camera,<wbr/> the translation of the source camera
+relative to the destination camera,<wbr/> the <a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a> of the destination
+camera,<wbr/> and finally the inverse of <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a> of the destination
+camera.<wbr/> This obtains a radial-distortion-free coordinate in the destination camera pixel
+coordinates.<wbr/></p>
+<p>To compare this against a real image from the destination camera,<wbr/> the destination camera
+image then needs to be corrected for radial distortion before comparison or sampling.<wbr/></p>
+<p>When <a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a> is GYROSCOPE,<wbr/> then this position is relative to
+the center of the primary gyroscope on the device.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -13725,6 +14640,10 @@ calibration.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -13733,10 +14652,10 @@ calibration.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The five calibration parameters that describe the
 transform from camera-centric 3D coordinates to sensor
 pixel coordinates:</p>
@@ -13774,7 +14693,7 @@ point,<wbr/> <code>z_<wbr/>s = 1</code>,<wbr/> and <code>w_<wbr/>s</code> is a m
 where <code>(0,<wbr/>0)</code> is the top-left of the
 preCorrectionActiveArraySize rectangle.<wbr/> Once the pose and
 intrinsic calibration transforms have been applied to a
-world point,<wbr/> then the <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>
+world point,<wbr/> then the <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a>
 transform needs to be applied,<wbr/> and the result adjusted to
 be in the <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a> coordinate
 system (where <code>(0,<wbr/> 0)</code> is the top-left of the
@@ -13785,12 +14704,13 @@ output buffers.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
           <tr class="entry" id="dynamic_android.lens.radialDistortion">
             <td class="entry_name
+                entry_name_deprecated
              " rowspan="3">
               android.<wbr/>lens.<wbr/>radial<wbr/>Distortion
             </td>
@@ -13805,6 +14725,7 @@ output buffers.<wbr/></p>
 
 
 
+              <span class="entry_type_deprecated">[deprecated] </span>
 
 
 
@@ -13822,6 +14743,11 @@ radial and tangential lens distortion.<wbr/></p>
             </td>
 
             <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
             </td>
 
             <td class="entry_tags">
@@ -13832,10 +14758,10 @@ radial and tangential lens distortion.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Four radial distortion coefficients <code>[kappa_<wbr/>0,<wbr/> kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
 kappa_<wbr/>3]</code> and two tangential distortion coefficients
 <code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
@@ -13866,7 +14792,92 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.lens.distortion">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>lens.<wbr/>distortion
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  5
+                </span>
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The correction coefficients to correct for this camera device's
+radial and tangential lens distortion.<wbr/></p>
+<p>Replaces the deprecated <a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a> field,<wbr/> which was
+inconsistently defined.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              
+            Unitless coefficients.<wbr/>
+          
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_DEPTH">DEPTH</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Three radial distortion coefficients <code>[kappa_<wbr/>1,<wbr/> kappa_<wbr/>2,<wbr/>
+kappa_<wbr/>3]</code> and two tangential distortion coefficients
+<code>[kappa_<wbr/>4,<wbr/> kappa_<wbr/>5]</code> that can be used to correct the
+lens's geometric distortion with the mapping equations:</p>
+<pre><code> x_<wbr/>c = x_<wbr/>i * ( 1 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>4 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>5 * ( r^2 + 2 * x_<wbr/>i^2 )
+ y_<wbr/>c = y_<wbr/>i * ( 1 + kappa_<wbr/>1 * r^2 + kappa_<wbr/>2 * r^4 + kappa_<wbr/>3 * r^6 ) +
+       kappa_<wbr/>5 * (2 * x_<wbr/>i * y_<wbr/>i) + kappa_<wbr/>4 * ( r^2 + 2 * y_<wbr/>i^2 )
+</code></pre>
+<p>Here,<wbr/> <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code> are the coordinates to sample in the
+input image that correspond to the pixel values in the
+corrected image at the coordinate <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code>:</p>
+<pre><code> correctedImage(x_<wbr/>i,<wbr/> y_<wbr/>i) = sample_<wbr/>at(x_<wbr/>c,<wbr/> y_<wbr/>c,<wbr/> inputImage)
+</code></pre>
+<p>The pixel coordinates are defined in a coordinate system
+related to the <a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a>
+calibration fields; see that entry for details of the mapping stages.<wbr/>
+Both <code>[x_<wbr/>i,<wbr/> y_<wbr/>i]</code> and <code>[x_<wbr/>c,<wbr/> y_<wbr/>c]</code>
+have <code>(0,<wbr/>0)</code> at the lens optical center <code>[c_<wbr/>x,<wbr/> c_<wbr/>y]</code>,<wbr/> and
+the range of the coordinates depends on the focal length
+terms of the intrinsic calibration.<wbr/></p>
+<p>Finally,<wbr/> <code>r</code> represents the radial distance from the
+optical center,<wbr/> <code>r^2 = x_<wbr/>i^2 + y_<wbr/>i^2</code>.<wbr/></p>
+<p>The distortion model used is the Brown-Conrady model.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -13875,10 +14886,10 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_noiseReduction" class="section">noiseReduction</td></tr>
+  <tr><td colspan="7" id="section_noiseReduction" class="section">noiseReduction</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -13887,6 +14898,7 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -13919,36 +14931,36 @@ is therefore no larger than <code>|<wbr/>r|<wbr/> &lt;= sqrt(2)</code>.<wbr/></p
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
 output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
 relative to sensor.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
 rate relative to sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MINIMAL</span>
+                    <span class="entry_type_enum_name">MINIMAL (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
 sensor output.<wbr/> </p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
-reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
-noise reduction applied (if MINIMAL is not supported.<wbr/>) The degree of noise reduction
-for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
-is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
-quality may improve from FAST).<wbr/></p>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>)
+or below have noise reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if
+supported) or no noise reduction applied (if MINIMAL is not supported.<wbr/>) The degree of
+noise reduction for low-resolution streams is tuned so that frame rate is not impacted,<wbr/>
+and the quality is equal to or better than FAST (since it is only applied to
+lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
 <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
 with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
 high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
@@ -13977,6 +14989,10 @@ be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LA
               <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -13986,10 +15002,10 @@ be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LA
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The noise reduction algorithm attempts to improve image quality by removing
 excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
 <p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
@@ -14019,17 +15035,17 @@ may adjust the noise reduction parameters for best image quality based on the
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For YUV_<wbr/>REPROCESSING The HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
 adjust the internal noise reduction parameters appropriately to get the best quality
 images.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14063,6 +15079,10 @@ applied to the images</p>
               <p>1 - 10</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -14072,14 +15092,14 @@ applied to the images</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -14088,6 +15108,7 @@ applied to the images</p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -14138,6 +15159,10 @@ by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -14147,10 +15172,10 @@ by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Full-capability camera devices will always support OFF and FAST.<wbr/></p>
 <p>Camera devices that support YUV_<wbr/>REPROCESSING or PRIVATE_<wbr/>REPROCESSING will support
 ZERO_<wbr/>SHUTTER_<wbr/>LAG.<wbr/></p>
@@ -14159,10 +15184,10 @@ ZERO_<wbr/>SHUTTER_<wbr/>LAG.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if noise reduction control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -14170,14 +15195,14 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -14186,6 +15211,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -14218,36 +15244,36 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No noise reduction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Noise reduction is applied without reducing frame rate relative to sensor
 output.<wbr/> It may be the same as OFF if noise reduction will reduce frame rate
 relative to sensor.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality noise reduction is applied,<wbr/> at the cost of possibly reduced frame
 rate relative to sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MINIMAL</span>
+                    <span class="entry_type_enum_name">MINIMAL (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>MINIMAL noise reduction is applied without reducing frame rate relative to
 sensor output.<wbr/> </p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG</span>
+                    <span class="entry_type_enum_name">ZERO_SHUTTER_LAG (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Noise reduction is applied at different levels for different output streams,<wbr/>
-based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>) or below have noise
-reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if supported) or no
-noise reduction applied (if MINIMAL is not supported.<wbr/>) The degree of noise reduction
-for low-resolution streams is tuned so that frame rate is not impacted,<wbr/> and the quality
-is equal to or better than FAST (since it is only applied to lower-resolution outputs,<wbr/>
-quality may improve from FAST).<wbr/></p>
+based on resolution.<wbr/> Streams at maximum recording resolution (see <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a>)
+or below have noise reduction applied,<wbr/> while higher-resolution streams have MINIMAL (if
+supported) or no noise reduction applied (if MINIMAL is not supported.<wbr/>) The degree of
+noise reduction for low-resolution streams is tuned so that frame rate is not impacted,<wbr/>
+and the quality is equal to or better than FAST (since it is only applied to
+lower-resolution outputs,<wbr/> quality may improve from FAST).<wbr/></p>
 <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
 with YUV or PRIVATE reprocessing,<wbr/> where the application continuously captures
 high-resolution intermediate buffers into a circular buffer,<wbr/> from which a final image is
@@ -14276,6 +15302,10 @@ be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LA
               <p><a href="#static_android.noiseReduction.availableNoiseReductionModes">android.<wbr/>noise<wbr/>Reduction.<wbr/>available<wbr/>Noise<wbr/>Reduction<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -14285,10 +15315,10 @@ be the default mode for CAMERA3_<wbr/>TEMPLATE_<wbr/>ZERO_<wbr/>SHUTTER_<wbr/>LA
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The noise reduction algorithm attempts to improve image quality by removing
 excessive noise added by the capture process,<wbr/> especially in dark conditions.<wbr/></p>
 <p>OFF means no noise reduction will be applied by the camera device,<wbr/> for both raw and
@@ -14318,17 +15348,17 @@ may adjust the noise reduction parameters for best image quality based on the
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For YUV_<wbr/>REPROCESSING The HAL can use <a href="#controls_android.reprocess.effectiveExposureFactor">android.<wbr/>reprocess.<wbr/>effective<wbr/>Exposure<wbr/>Factor</a> to
 adjust the internal noise reduction parameters appropriately to get the best quality
 images.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -14337,10 +15367,10 @@ images.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_quirks" class="section">quirks</td></tr>
+  <tr><td colspan="7" id="section_quirks" class="section">quirks</td></tr>
 
 
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -14349,6 +15379,7 @@ images.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -14397,15 +15428,19 @@ and output (face rectangles).<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Normalized coordinates refer to those in the
 (-1000,<wbr/>1000) range mentioned in the
 android.<wbr/>hardware.<wbr/>Camera API.<wbr/></p>
@@ -14417,7 +15452,7 @@ removed in future versions of camera service.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14453,15 +15488,19 @@ trigger.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL implementations should implement AF trigger
 modes for AUTO,<wbr/> MACRO,<wbr/> CONTINUOUS_<wbr/>FOCUS,<wbr/> and
 CONTINUOUS_<wbr/>PICTURE modes instead of using this flag.<wbr/> Does
@@ -14471,7 +15510,7 @@ removed in future versions of camera service</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14508,15 +15547,19 @@ shutter lag stream</p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL implementations should use gralloc usage flags
 to determine that a stream will be used for
 zero-shutter-lag,<wbr/> instead of relying on an explicit
@@ -14527,7 +15570,7 @@ camera service.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14563,15 +15606,19 @@ returned using multiple process_<wbr/>capture_<wbr/>result calls.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Does not need to be listed in static
 metadata.<wbr/> Support for partial results will be reworked in
 future versions of camera service.<wbr/> This quirk will stop
@@ -14581,23 +15628,23 @@ consideration of future support.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
 for information on how to implement partial results.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -14606,6 +15653,7 @@ for information on how to implement partial results.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -14639,12 +15687,12 @@ for information on how to implement partial results.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FINAL</span>
+                    <span class="entry_type_enum_name">FINAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The last or only metadata result buffer
 for this capture.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PARTIAL</span>
+                    <span class="entry_type_enum_name">PARTIAL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>A partial buffer of result metadata for this
 capture.<wbr/> More result buffers for this capture will be sent
 by the camera device,<wbr/> the last of which will be marked
@@ -14669,15 +15717,19 @@ values.<wbr/></p>
               <p>Optional.<wbr/> Default value is FINAL.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The entries in the result metadata buffers for a
 single capture may not overlap,<wbr/> except for this entry.<wbr/> The
 FINAL buffers must retain FIFO ordering relative to the
@@ -14691,16 +15743,16 @@ only be used by the camera device if quirks.<wbr/>usePartialResult is set to 1.<
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Refer to <code>camera3_<wbr/>capture_<wbr/>result::partial_<wbr/>result</code>
 for information on how to implement partial results.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -14709,10 +15761,10 @@ for information on how to implement partial results.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_request" class="section">request</td></tr>
+  <tr><td colspan="7" id="section_request" class="section">request</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -14721,6 +15773,7 @@ for information on how to implement partial results.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -14771,13 +15824,17 @@ frameCount value).<wbr/></p>
               <p>Any int.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14812,6 +15869,10 @@ frame</p>
               <p>Any int</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -14821,7 +15882,7 @@ frame</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14862,6 +15923,10 @@ for the source of reprocessing data.<wbr/></p>
               <p>Typically,<wbr/> only one entry allowed,<wbr/> must be a valid reprocess stream ID.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -14870,17 +15935,17 @@ for the source of reprocessing data.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only meaningful when <a href="#controls_android.request.type">android.<wbr/>request.<wbr/>type</a> ==
 REPROCESS.<wbr/> Ignored otherwise</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14900,7 +15965,7 @@ REPROCESS.<wbr/> Ignored otherwise</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_name">NONE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
 for application-bound buffer data.<wbr/> If no
 application-bound streams exist,<wbr/> no frame should be
@@ -14911,7 +15976,7 @@ information.<wbr/> Timestamp information should still be
 included with any output stream buffers</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
 only be produced if they are separately
 enabled</p></span>
@@ -14931,6 +15996,10 @@ output</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -14940,7 +16009,7 @@ output</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -14982,6 +16051,10 @@ from this capture must be sent to</p>
 created</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -14990,10 +16063,10 @@ created</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If no output streams are listed,<wbr/> then the image
 data should simply be discarded.<wbr/> The image data must
 still be captured for metadata and statistics production,<wbr/>
@@ -15002,7 +16075,7 @@ and the lens and flash must operate as requested.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15024,13 +16097,13 @@ and the lens and flash must operate as requested.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CAPTURE</span>
+                    <span class="entry_type_enum_name">CAPTURE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Capture a new image from the imaging hardware,<wbr/>
 and process it according to the
 settings</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">REPROCESS</span>
+                    <span class="entry_type_enum_name">REPROCESS (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Process previously captured data; the
 <a href="#controls_android.request.inputStreams">android.<wbr/>request.<wbr/>input<wbr/>Streams</a> parameter determines the
 source reprocessing stream.<wbr/> TODO: Mark dynamic metadata
@@ -15042,7 +16115,7 @@ needed for reprocessing with [RP]</p></span>
 
             <td class="entry_description">
               <p>The type of the request; either CAPTURE or
-REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
+REPROCESS.<wbr/> For legacy HAL3,<wbr/> this tag is redundant.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -15052,6 +16125,10 @@ REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -15061,14 +16138,14 @@ REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -15077,6 +16154,7 @@ REPROCESS.<wbr/> For HAL3,<wbr/> this tag is redundant.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -15130,6 +16208,10 @@ for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLeve
 &gt;= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -15138,10 +16220,10 @@ for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLeve
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is a 3 element tuple that contains the max number of output simultaneous
 streams for raw sensor,<wbr/> processed (but not stalling),<wbr/> and processed (and stalling)
 formats respectively.<wbr/> For example,<wbr/> assuming that JPEG is typically a processed and
@@ -15156,17 +16238,17 @@ into the 3 stream types as below:</p>
 <ul>
 <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.<wbr/>
   Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a>.<wbr/></li>
-<li>Raw formats: <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a>,<wbr/> or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a>.<wbr/></li>
-<li>Processed (but not-stalling): any non-RAW format without a stall duration.<wbr/>
-  Typically <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a>,<wbr/>
-  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a>,<wbr/> or
-  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a>.<wbr/></li>
+<li>Raw formats: <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW_SENSOR">RAW_<wbr/>SENSOR</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">RAW10</a>,<wbr/> or
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">RAW12</a>.<wbr/></li>
+<li>Processed (but not-stalling): any non-RAW format without a stall duration.<wbr/>  Typically
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a>,<wbr/>
+  <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#NV21">NV21</a>,<wbr/> or <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12">YV12</a>.<wbr/></li>
 </ul>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15202,15 +16284,19 @@ for any <code>RAW</code> formats.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value contains the max number of output simultaneous
 streams from the raw sensor.<wbr/></p>
 <p>This lists the upper bound of the number of output streams supported by
@@ -15229,7 +16315,7 @@ never support raw streams.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15267,15 +16353,19 @@ for FULL mode devices (<code><a href="#static_android.info.supportedHardwareLeve
 &gt;= 2 for LIMITED mode devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == LIMITED</code>).<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value contains the max number of output simultaneous
 streams for any processed (but not-stalling) formats.<wbr/></p>
 <p>This lists the upper bound of the number of output streams supported by
@@ -15297,7 +16387,7 @@ processed format -- it will return 0 for a non-stalling stream.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15333,15 +16423,19 @@ for any processed (and stalling) formats.<wbr/></p>
               <p>&gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value contains the max number of output simultaneous
 streams for any processed (but not-stalling) formats.<wbr/></p>
 <p>This lists the upper bound of the number of output streams supported by
@@ -15349,8 +16443,7 @@ the camera device.<wbr/> Using more streams simultaneously may require more hard
 CPU resources that will consume more power.<wbr/> The image format for this kind of an output stream can
 be any non-<code>RAW</code> and supported format provided by <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/></p>
 <p>A processed and stalling format is defined as any non-RAW format with a stallDurations
-&gt; 0.<wbr/>  Typically only the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a> is a
-stalling format.<wbr/></p>
+&gt; 0.<wbr/>  Typically only the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">JPEG format</a> is a stalling format.<wbr/></p>
 <p>For full guarantees,<wbr/> query <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> with a
 processed format -- it will return a non-0 value for a stalling stream.<wbr/></p>
 <p>LEGACY devices will support up to 1 processing/<wbr/>stalling stream.<wbr/></p>
@@ -15358,7 +16451,7 @@ processed format -- it will return a non-0 value for a stalling stream.<wbr/></p
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15398,6 +16491,10 @@ can be allocated at the same time.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -15406,17 +16503,17 @@ can be allocated at the same time.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only used by HAL2.<wbr/>x.<wbr/></p>
 <p>When set to 0,<wbr/> it means no reprocess stream is supported.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15428,7 +16525,7 @@ can be allocated at the same time.<wbr/></p>
             <td class="entry_type">
                 <span class="entry_type_name">int32</span>
 
-              <span class="entry_type_visibility"> [public]</span>
+              <span class="entry_type_visibility"> [java_public]</span>
 
 
               <span class="entry_type_hwlevel">[full] </span>
@@ -15450,6 +16547,10 @@ that can be configured and used simultaneously by a camera device.<wbr/></p>
               <p>0 or 1.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -15458,10 +16559,10 @@ that can be configured and used simultaneously by a camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to 0,<wbr/> it means no input stream is supported.<wbr/></p>
 <p>The image format for a input stream can be any supported format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/> When using an
 input stream,<wbr/> there must be at least one output stream configured to to receive the
@@ -15476,16 +16577,16 @@ should be JPEG.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For the reprocessing flow and controls,<wbr/> see
 hardware/<wbr/>libhardware/<wbr/>include/<wbr/>hardware/<wbr/>camera3.<wbr/>h Section 10 for more details.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15519,15 +16620,19 @@ to the framework.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A typical minimum value for this is 2 (one stage to expose,<wbr/>
 one stage to readout) from the sensor.<wbr/> The ISP then usually adds
 its own stages to do custom HW processing.<wbr/> Further stages may be
@@ -15544,16 +16649,16 @@ the max pipeline depth will be up to 8 x size of high speed capture request list
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value should be 4 or less,<wbr/> expect for the high speed recording session,<wbr/> where the
 max batch sizes may be larger than 1.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15586,15 +16691,19 @@ a result will be composed of.<wbr/></p>
               <p>&gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>In order to combat the pipeline latency,<wbr/> partial results
 may be delivered to the application layer from the camera device as
 soon as they are available.<wbr/></p>
@@ -15611,7 +16720,7 @@ partial results.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -15636,7 +16745,7 @@ partial results.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">BACKWARD_COMPATIBLE</span>
+                    <span class="entry_type_enum_name">BACKWARD_COMPATIBLE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The minimal set of capabilities that every camera
 device (regardless of <a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a>)
 supports.<wbr/></p>
@@ -15649,7 +16758,7 @@ capability,<wbr/> indicating that they support only depth measurement,<wbr/>
 not standard color output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MANUAL_SENSOR</span>
+                    <span class="entry_type_enum_name">MANUAL_SENSOR (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device can be manually controlled (3A algorithms such
 as auto-exposure,<wbr/> and auto-focus can be bypassed).<wbr/>
@@ -15699,7 +16808,7 @@ additionally return a min frame duration that is greater than
 zero for each supported size-format combination.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">MANUAL_POST_PROCESSING</span>
+                    <span class="entry_type_enum_name">MANUAL_POST_PROCESSING (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device post-processing stages can be manually controlled.<wbr/>
 The camera device supports basic manual control of the image post-processing
@@ -15745,7 +16854,7 @@ will accurately report the values applied by AWB in the result.<wbr/></p>
 controls,<wbr/> but this capability only covers the above list of controls.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">RAW</span>
+                    <span class="entry_type_enum_name">RAW (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports outputting RAW buffers and
 metadata for interpreting them.<wbr/></p>
@@ -15763,7 +16872,7 @@ raw sensor images.<wbr/></p>
 </ul></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PRIVATE_REPROCESSING</span>
+                    <span class="entry_type_enum_name">PRIVATE_REPROCESSING (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports the Zero Shutter Lag reprocessing use case.<wbr/></p>
 <ul>
@@ -15796,7 +16905,7 @@ raw sensor images.<wbr/></p>
 </ul></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">READ_SENSOR_SETTINGS</span>
+                    <span class="entry_type_enum_name">READ_SENSOR_SETTINGS (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports accurately reporting the sensor settings for many of
 the sensor controls while the built-in 3A algorithm is running.<wbr/>  This allows
@@ -15822,58 +16931,38 @@ in the CaptureResult,<wbr/> including when 3A is enabled:</p>
 always be included if the MANUAL_<wbr/>SENSOR capability is available.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BURST_CAPTURE</span>
+                    <span class="entry_type_enum_name">BURST_CAPTURE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports capturing high-resolution images at &gt;= 20 frames per
 second,<wbr/> in at least the uncompressed YUV format,<wbr/> when post-processing settings are set
 to FAST.<wbr/> Additionally,<wbr/> maximum-resolution images can be captured at &gt;= 10 frames
 per second.<wbr/>  Here,<wbr/> 'high resolution' means at least 8 megapixels,<wbr/> or the maximum
-resolution of the device,<wbr/> whichever is smaller.<wbr/></p>
-<p>More specifically,<wbr/> this means that a size matching the camera device's active array
-size is listed as a supported size for the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> format in either <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> or <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
-with a minimum frame duration for that format and size of either &lt;= 1/<wbr/>20 s,<wbr/> or
-&lt;= 1/<wbr/>10 s,<wbr/> respectively; and the <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a> entry
-lists at least one FPS range where the minimum FPS is &gt;= 1 /<wbr/> minimumFrameDuration
-for the maximum-size YUV_<wbr/>420_<wbr/>888 format.<wbr/>  If that maximum size is listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighResolutionOutputSizes">StreamConfigurationMap#getHighResolutionOutputSizes</a>,<wbr/>
-then the list of resolutions for YUV_<wbr/>420_<wbr/>888 from <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">StreamConfigurationMap#getOutputSizes</a> contains at
-least one resolution &gt;= 8 megapixels,<wbr/> with a minimum frame duration of &lt;= 1/<wbr/>20
-s.<wbr/></p>
-<p>If the device supports the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a>,<wbr/> <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">ImageFormat#RAW12</a>,<wbr/> then those can also be captured at the same rate
-as the maximum-size YUV_<wbr/>420_<wbr/>888 resolution is.<wbr/></p>
-<p>If the device supports the PRIVATE_<wbr/>REPROCESSING capability,<wbr/> then the same guarantees
-as for the YUV_<wbr/>420_<wbr/>888 format also apply to the <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#PRIVATE">ImageFormat#PRIVATE</a> format.<wbr/></p>
-<p>In addition,<wbr/> the <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a> field is guaranted to have a value between 0
-and 4,<wbr/> inclusive.<wbr/> <a href="#static_android.control.aeLockAvailable">android.<wbr/>control.<wbr/>ae<wbr/>Lock<wbr/>Available</a> and <a href="#static_android.control.awbLockAvailable">android.<wbr/>control.<wbr/>awb<wbr/>Lock<wbr/>Available</a>
-are also guaranteed to be <code>true</code> so burst capture with these two locks ON yields
-consistent image output.<wbr/></p></span>
-                  </li>
-                  <li>
-                    <span class="entry_type_enum_name">YUV_REPROCESSING</span>
+resolution of the device,<wbr/> whichever is smaller.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">YUV_REPROCESSING (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports the YUV_<wbr/>420_<wbr/>888 reprocessing use case,<wbr/> similar as
 PRIVATE_<wbr/>REPROCESSING,<wbr/> This capability requires the camera device to support the
 following:</p>
 <ul>
 <li>One input stream is supported,<wbr/> that is,<wbr/> <code><a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> == 1</code>.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> is supported as an output/<wbr/>input format,<wbr/> that is,<wbr/>
-  YUV_<wbr/>420_<wbr/>888 is included in the lists of formats returned by
-  <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and
-  <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> is supported as an output/<wbr/>input
+  format,<wbr/> that is,<wbr/> YUV_<wbr/>420_<wbr/>888 is included in the lists of formats returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a> and <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputFormats">StreamConfigurationMap#getOutputFormats</a>.<wbr/></li>
 <li><a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getValidOutputFormatsForInput">StreamConfigurationMap#getValidOutputFormatsForInput</a>
   returns non-empty int[] for each supported input format returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputFormats">StreamConfigurationMap#getInputFormats</a>.<wbr/></li>
 <li>Each size returned by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getInputSizes">get<wbr/>Input<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a> is also included in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputSizes">get<wbr/>Output<wbr/>Sizes(YUV_<wbr/>420_<wbr/>888)</a></li>
-<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> does not cause a frame rate drop
-  relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
+<li>Using <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> does not cause a frame rate
+  drop relative to the sensor's maximum capture rate (at that resolution).<wbr/></li>
 <li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> will be reprocessable into both
   <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> and <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> formats.<wbr/></li>
 <li>The maximum available resolution for <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> streams (both input/<wbr/>output) will match the
   maximum available resolution of <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#JPEG">ImageFormat#JPEG</a> streams.<wbr/></li>
 <li>Static metadata <a href="#static_android.reprocess.maxCaptureStall">android.<wbr/>reprocess.<wbr/>max<wbr/>Capture<wbr/>Stall</a>.<wbr/></li>
 <li>Only the below controls are effective for reprocessing requests and will be present
-  in capture results.<wbr/> The reprocess requests are from the original capture results that
-  are associated with the intermediate <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a>
-  output buffers.<wbr/>  All other controls in the reprocess requests will be ignored by the
-  camera device.<wbr/><ul>
+  in capture results.<wbr/> The reprocess requests are from the original capture results
+  that are associated with the intermediate <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a> output buffers.<wbr/>  All other controls in the
+  reprocess requests will be ignored by the camera device.<wbr/><ul>
 <li>android.<wbr/>jpeg.<wbr/>*</li>
 <li><a href="#controls_android.noiseReduction.mode">android.<wbr/>noise<wbr/>Reduction.<wbr/>mode</a></li>
 <li><a href="#controls_android.edge.mode">android.<wbr/>edge.<wbr/>mode</a></li>
@@ -15885,58 +16974,54 @@ following:</p>
 </ul></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DEPTH_OUTPUT</span>
+                    <span class="entry_type_enum_name">DEPTH_OUTPUT (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>The camera device can produce depth measurements from its field of view.<wbr/></p>
 <p>This capability requires the camera device to support the following:</p>
 <ul>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH16">ImageFormat#DEPTH16</a> is supported as an output format.<wbr/></li>
-<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a> is optionally supported as an
-  output format.<wbr/></li>
-<li>This camera device,<wbr/> and all camera devices with the same <a href="#static_android.lens.facing">android.<wbr/>lens.<wbr/>facing</a>,<wbr/>
-  will list the following calibration entries in both
-  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a> and
-  <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>:<ul>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH16">ImageFormat#DEPTH16</a> is supported as
+  an output format.<wbr/></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a> is
+  optionally supported as an output format.<wbr/></li>
+<li>This camera device,<wbr/> and all camera devices with the same <a href="#static_android.lens.facing">android.<wbr/>lens.<wbr/>facing</a>,<wbr/> will
+  list the following calibration metadata entries in both <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>
+  and <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>:<ul>
 <li><a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a></li>
 <li><a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a></li>
 <li><a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a></li>
-<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a></li>
+<li><a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a></li>
 </ul>
 </li>
 <li>The <a href="#static_android.depth.depthIsExclusive">android.<wbr/>depth.<wbr/>depth<wbr/>Is<wbr/>Exclusive</a> entry is listed by this device.<wbr/></li>
+<li>As of Android P,<wbr/> the <a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a> entry is listed by this device.<wbr/></li>
 <li>A LIMITED camera with only the DEPTH_<wbr/>OUTPUT capability does not have to support
   normal YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> and PRIV-format outputs.<wbr/> It only has to support the DEPTH16
   format.<wbr/></li>
 </ul>
 <p>Generally,<wbr/> depth output operates at a slower frame rate than standard color capture,<wbr/>
 so the DEPTH16 and DEPTH_<wbr/>POINT_<wbr/>CLOUD formats will commonly have a stall duration that
-should be accounted for (see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>).<wbr/>
+should be accounted for (see <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>).<wbr/>
 On a device that supports both depth and color-based output,<wbr/> to enable smooth preview,<wbr/>
 using a repeating burst is recommended,<wbr/> where a depth-output target is only included
 once every N frames,<wbr/> where N is the ratio between preview output rate and depth output
 rate,<wbr/> including depth stall time.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CONSTRAINED_HIGH_SPEED_VIDEO</span>
+                    <span class="entry_type_enum_name">CONSTRAINED_HIGH_SPEED_VIDEO (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
-                    <span class="entry_type_enum_notes"><p>The device supports constrained high speed video recording (frame rate &gt;=120fps)
-use case.<wbr/> The camera device will support high speed capture session created by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>,<wbr/> which
-only accepts high speed request lists created by
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/></p>
-<p>A camera device can still support high speed video streaming by advertising the high speed
-FPS ranges in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a>.<wbr/> For this case,<wbr/> all normal
-capture request per frame control and synchronization requirements will apply to
-the high speed fps ranges,<wbr/> the same as all other fps ranges.<wbr/> This capability describes
-the capability of a specialized operating mode with many limitations (see below),<wbr/> which
-is only targeted at high speed video recording.<wbr/></p>
-<p>The supported high speed video sizes and fps ranges are specified in
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/>
-To get desired output frame rates,<wbr/> the application is only allowed to select video size
-and FPS range combinations provided by
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/>
-The fps range can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
+                    <span class="entry_type_enum_notes"><p>The device supports constrained high speed video recording (frame rate &gt;=120fps) use
+case.<wbr/> The camera device will support high speed capture session created by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>,<wbr/> which
+only accepts high speed request lists created by <a href="https://developer.android.com/reference/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.html#createHighSpeedRequestList">CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList</a>.<wbr/></p>
+<p>A camera device can still support high speed video streaming by advertising the high
+speed FPS ranges in <a href="#static_android.control.aeAvailableTargetFpsRanges">android.<wbr/>control.<wbr/>ae<wbr/>Available<wbr/>Target<wbr/>Fps<wbr/>Ranges</a>.<wbr/> For this case,<wbr/> all
+normal capture request per frame control and synchronization requirements will apply
+to the high speed fps ranges,<wbr/> the same as all other fps ranges.<wbr/> This capability
+describes the capability of a specialized operating mode with many limitations (see
+below),<wbr/> which is only targeted at high speed video recording.<wbr/></p>
+<p>The supported high speed video sizes and fps ranges are specified in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/>
+To get desired output frame rates,<wbr/> the application is only allowed to select video
+size and FPS range combinations provided by <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/>  The
+fps range can be controlled via <a href="#controls_android.control.aeTargetFpsRange">android.<wbr/>control.<wbr/>ae<wbr/>Target<wbr/>Fps<wbr/>Range</a>.<wbr/></p>
 <p>In this capability,<wbr/> the camera device will override aeMode,<wbr/> awbMode,<wbr/> and afMode to
 ON,<wbr/> AUTO,<wbr/> and CONTINUOUS_<wbr/>VIDEO,<wbr/> respectively.<wbr/> All post-processing block mode
 controls will be overridden to be FAST.<wbr/> Therefore,<wbr/> no manual control of capture
@@ -15971,19 +17056,16 @@ high frame rate for a given video size,<wbr/> or it will end up with lower recor
 frame rate.<wbr/> If the destination surface is from preview window,<wbr/> the actual preview frame
 rate will be bounded by the screen refresh rate.<wbr/></p>
 <p>The camera device will only support up to 2 high speed simultaneous output surfaces
-(preview and recording surfaces)
-in this mode.<wbr/> Above controls will be effective only if all of below conditions are true:</p>
+(preview and recording surfaces) in this mode.<wbr/> Above controls will be effective only
+if all of below conditions are true:</p>
 <ul>
 <li>The application creates a camera capture session with no more than 2 surfaces via
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>.<wbr/> The
-targeted surfaces must be preview surface (either from
-<a href="https://developer.android.com/reference/android/view/SurfaceView.html">SurfaceView</a> or <a href="https://developer.android.com/reference/android/graphics/SurfaceTexture.html">SurfaceTexture</a>) or
-recording surface(either from <a href="https://developer.android.com/reference/android/media/MediaRecorder.html#getSurface">MediaRecorder#getSurface</a> or
-<a href="https://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface">MediaCodec#createInputSurface</a>).<wbr/></li>
+targeted surfaces must be preview surface (either from <a href="https://developer.android.com/reference/android/view/SurfaceView.html">SurfaceView</a> or <a href="https://developer.android.com/reference/android/graphics/SurfaceTexture.html">SurfaceTexture</a>) or recording
+surface (either from <a href="https://developer.android.com/reference/android/media/MediaRecorder.html#getSurface">MediaRecorder#getSurface</a> or <a href="https://developer.android.com/reference/android/media/MediaCodec.html#createInputSurface">MediaCodec#createInputSurface</a>).<wbr/></li>
 <li>The stream sizes are selected from the sizes reported by
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoSizes">StreamConfigurationMap#getHighSpeedVideoSizes</a>.<wbr/></li>
-<li>The FPS ranges are selected from
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/></li>
+<li>The FPS ranges are selected from <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getHighSpeedVideoFpsRanges">StreamConfigurationMap#getHighSpeedVideoFpsRanges</a>.<wbr/></li>
 </ul>
 <p>When above conditions are NOT satisfied,<wbr/>
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createConstrainedHighSpeedCaptureSession">CameraDevice#createConstrainedHighSpeedCaptureSession</a>
@@ -15993,6 +17075,64 @@ reconfigurations,<wbr/> which may introduce extra latency.<wbr/> It is recommend
 the application avoids unnecessary maximum target FPS changes as much as possible
 during high speed streaming.<wbr/></p></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">MOTION_TRACKING (v3.3)</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports the MOTION_<wbr/>TRACKING value for
+<a href="#controls_android.control.captureIntent">android.<wbr/>control.<wbr/>capture<wbr/>Intent</a>,<wbr/> which limits maximum exposure time to 20 ms.<wbr/></p>
+<p>This limits the motion blur of capture images,<wbr/> resulting in better image tracking
+results for use cases such as image stabilization or augmented reality.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">LOGICAL_MULTI_CAMERA (v3.3)</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device is a logical camera backed by two or more physical cameras that are
+also exposed to the application.<wbr/></p>
+<p>This capability requires the camera device to support the following:</p>
+<ul>
+<li>This camera device must list the following static metadata entries in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>:<ul>
+<li><a href="#static_android.logicalMultiCamera.physicalIds">android.<wbr/>logical<wbr/>Multi<wbr/>Camera.<wbr/>physical<wbr/>Ids</a></li>
+<li><a href="#static_android.logicalMultiCamera.sensorSyncType">android.<wbr/>logical<wbr/>Multi<wbr/>Camera.<wbr/>sensor<wbr/>Sync<wbr/>Type</a></li>
+</ul>
+</li>
+<li>The underlying physical cameras' static metadata must list the following entries,<wbr/>
+  so that the application can correlate pixels from the physical streams:<ul>
+<li><a href="#static_android.lens.poseReference">android.<wbr/>lens.<wbr/>pose<wbr/>Reference</a></li>
+<li><a href="#static_android.lens.poseRotation">android.<wbr/>lens.<wbr/>pose<wbr/>Rotation</a></li>
+<li><a href="#static_android.lens.poseTranslation">android.<wbr/>lens.<wbr/>pose<wbr/>Translation</a></li>
+<li><a href="#static_android.lens.intrinsicCalibration">android.<wbr/>lens.<wbr/>intrinsic<wbr/>Calibration</a></li>
+<li><a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a></li>
+</ul>
+</li>
+<li>The SENSOR_<wbr/>INFO_<wbr/>TIMESTAMP_<wbr/>SOURCE of the logical device and physical devices must be
+  the same.<wbr/></li>
+<li>The logical camera device must be LIMITED or higher device.<wbr/></li>
+</ul>
+<p>Both the logical camera device and its underlying physical devices support the
+mandatory stream combinations required for their device levels.<wbr/></p>
+<p>Additionally,<wbr/> for each guaranteed stream combination,<wbr/> the logical camera supports:</p>
+<ul>
+<li>For each guaranteed stream combination,<wbr/> the logical camera supports replacing one
+  logical <a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">YUV_<wbr/>420_<wbr/>888</a>
+  or raw stream with two physical streams of the same size and format,<wbr/> each from a
+  separate physical camera,<wbr/> given that the size and format are supported by both
+  physical cameras.<wbr/></li>
+<li>If the logical camera doesn't advertise RAW capability,<wbr/> but the underlying physical
+  cameras do,<wbr/> the logical camera will support guaranteed stream combinations for RAW
+  capability,<wbr/> except that the RAW streams will be physical streams,<wbr/> each from a separate
+  physical camera.<wbr/> This is usually the case when the physical cameras have different
+  sensor sizes.<wbr/></li>
+</ul>
+<p>Using physical streams in place of a logical stream of the same size and format will
+not slow down the frame rate of the capture,<wbr/> as long as the minimum frame duration
+of the physical and logical streams are the same.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">MONOCHROME (v3.3)</span>
+                    <span class="entry_type_enum_optional">[optional]</span>
+                    <span class="entry_type_enum_notes"><p>The camera device is a monochrome camera that doesn't contain a color filter array,<wbr/>
+and the pixel values on U and V planes are all 128.<wbr/></p></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -16008,15 +17148,19 @@ advertises as fully supporting.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A capability is a contract that the camera device makes in order
 to be able to satisfy one or more use cases.<wbr/></p>
 <p>Listing a capability guarantees that the whole set of features
@@ -16038,10 +17182,10 @@ devices,<wbr/> but the application should query this key to be sure.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Additional constraint details per-capability will be available
 in the Compatibility Test Suite.<wbr/></p>
 <p>Minimum baseline requirements required for the
@@ -16073,10 +17217,35 @@ addition to the other keys explicitly mentioned in the DEPTH_<wbr/>OUTPUT
 enum notes.<wbr/> The entry <a href="#static_android.depth.maxDepthSamples">android.<wbr/>depth.<wbr/>max<wbr/>Depth<wbr/>Samples</a> must be available
 if the DEPTH_<wbr/>POINT_<wbr/>CLOUD format is supported (HAL pixel format BLOB,<wbr/> dataspace
 DEPTH).<wbr/></p>
+<p>For a camera device with LOGICAL_<wbr/>MULTI_<wbr/>CAMERA capability,<wbr/> it should operate in the
+same way as a physical camera device based on its hardware level and capabilities.<wbr/>
+It's recommended that its feature set is a superset of that of individual physical cameras.<wbr/></p>
+<p>For MONOCHROME,<wbr/> the camera device must also advertise BACKWARD_<wbr/>COMPATIBLE capability,<wbr/> and
+it is exclusive of both RAW and MANUAL_<wbr/>POST_<wbr/>PROCESSING capabilities:</p>
+<ul>
+<li>
+<p>To maintain backward compatibility,<wbr/> the camera device must support all
+BACKWARD_<wbr/>COMPATIBLE required keys.<wbr/> The <a href="#static_android.control.awbAvailableModes">android.<wbr/>control.<wbr/>awb<wbr/>Available<wbr/>Modes</a> key only contains
+AUTO,<wbr/> and <a href="#dynamic_android.control.awbState">android.<wbr/>control.<wbr/>awb<wbr/>State</a> are either CONVERGED or LOCKED depending on
+<a href="#controls_android.control.awbLock">android.<wbr/>control.<wbr/>awb<wbr/>Lock</a>.<wbr/></p>
+</li>
+<li>
+<p>A monochrome device doesn't need to advertise DNG related optional metadata tags.<wbr/></p>
+</li>
+<li>
+<p><a href="#controls_android.colorCorrection.mode">android.<wbr/>color<wbr/>Correction.<wbr/>mode</a>,<wbr/> <a href="#controls_android.colorCorrection.transform">android.<wbr/>color<wbr/>Correction.<wbr/>transform</a>,<wbr/> and
+<a href="#controls_android.colorCorrection.gains">android.<wbr/>color<wbr/>Correction.<wbr/>gains</a> are not applicable.<wbr/> So the camera device cannot
+be a FULL device.<wbr/> However,<wbr/> the HAL can still advertise other individual capabilities.<wbr/></p>
+</li>
+<li>
+<p>If the device supports tonemap control,<wbr/> only <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> is used.<wbr/>
+CurveGreen and curveBlue are no-ops.<wbr/></p>
+</li>
+</ul>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16113,15 +17282,19 @@ to use with <a href="https://developer.android.com/reference/android/hardware/ca
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Attempting to set a key into a CaptureRequest that is not
 listed here will result in an invalid request and will be rejected
 by the camera device.<wbr/></p>
@@ -16133,10 +17306,10 @@ in <a href="#static_android.request.availableCapabilities">android.<wbr/>request
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Vendor tags can be listed here.<wbr/> Vendor tag metadata should also
 use the extensions C api (refer to camera3.<wbr/>h for more details).<wbr/></p>
 <p>Setting/<wbr/>getting vendor tags will be checked against the metadata
@@ -16149,7 +17322,7 @@ via
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16176,8 +17349,7 @@ via
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>A list of all keys that the camera device has available
-to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>.<wbr/></p>
+              <p>A list of all keys that the camera device has available to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CaptureResult.html">CaptureResult</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -16186,15 +17358,19 @@ to use with <a href="https://developer.android.com/reference/android/hardware/ca
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Attempting to get a key from a CaptureResult that is not
 listed here will always return a <code>null</code> value.<wbr/> Getting a key from
 a CaptureResult that is listed here will generally never return a <code>null</code>
@@ -16213,10 +17389,10 @@ in <a href="#static_android.request.availableCapabilities">android.<wbr/>request
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Tags listed here must always have an entry in the result metadata,<wbr/>
 even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
 matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
@@ -16230,7 +17406,7 @@ here or in the vendor tag list.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16257,8 +17433,7 @@ here or in the vendor tag list.<wbr/></p>
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>A list of all keys that the camera device has available
-to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.<wbr/></p>
+              <p>A list of all keys that the camera device has available to use with <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.<wbr/></p>
             </td>
 
             <td class="entry_units">
@@ -16267,15 +17442,19 @@ to use with <a href="https://developer.android.com/reference/android/hardware/ca
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry follows the same rules as
 <a href="#static_android.request.availableResultKeys">android.<wbr/>request.<wbr/>available<wbr/>Result<wbr/>Keys</a> (except that it applies for
 CameraCharacteristics instead of CaptureResult).<wbr/> See above for more
@@ -16284,10 +17463,10 @@ details.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Keys listed here must always have an entry in the static info metadata,<wbr/>
 even if that size is 0 elements.<wbr/> Only array-type tags (e.<wbr/>g.<wbr/> lists,<wbr/>
 matrices,<wbr/> strings) are allowed to have 0 elements.<wbr/></p>
@@ -16302,69 +17481,45 @@ via <a href="https://developer.android.com/reference/android/hardware/camera2/Ca
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
-        
-
-      <!-- end of kind -->
-      </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
-
-      <thead class="entries_header">
-        <tr>
-          <th class="th_name">Property Name</th>
-          <th class="th_type">Type</th>
-          <th class="th_description">Description</th>
-          <th class="th_units">Units</th>
-          <th class="th_range">Range</th>
-          <th class="th_tags">Tags</th>
-        </tr>
-      </thead>
-
-      <tbody>
-
-        
-
-        
-
-        
-
-        
-
                 
-          <tr class="entry" id="dynamic_android.request.frameCount">
+          <tr class="entry" id="static_android.request.availableSessionKeys">
             <td class="entry_name
-                entry_name_deprecated
-             " rowspan="3">
-              android.<wbr/>request.<wbr/>frame<wbr/>Count
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Session<wbr/>Keys
             </td>
             <td class="entry_type">
                 <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
 
-              <span class="entry_type_visibility"> [hidden]</span>
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [ndk_public]</span>
 
 
+              <span class="entry_type_hwlevel">[legacy] </span>
 
-              <span class="entry_type_deprecated">[deprecated] </span>
 
 
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>A frame counter set by the framework.<wbr/> This value monotonically
-increases with every new result (that is,<wbr/> each new result has a unique
-frameCount value).<wbr/></p>
+              <p>A subset of the available request keys that the camera device
+can pass as part of the capture session initialization.<wbr/></p>
             </td>
 
             <td class="entry_units">
-              count of frames
             </td>
 
             <td class="entry_range">
-              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
-              <p>&gt; 0</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
             </td>
 
             <td class="entry_tags">
@@ -16372,30 +17527,88 @@ frameCount value).<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>Reset on release()</p>
+            <td class="entry_details" colspan="6">
+              <p>This is a subset of <a href="#static_android.request.availableRequestKeys">android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys</a> which
+contains a list of keys that are difficult to apply per-frame and
+can result in unexpected delays when modified during the capture session
+lifetime.<wbr/> Typical examples include parameters that require a
+time-consuming hardware re-configuration or internal camera pipeline
+change.<wbr/> For performance reasons we advise clients to pass their initial
+values as part of
+<a href="https://developer.android.com/reference/SessionConfiguration.html#setSessionParameters">SessionConfiguration#setSessionParameters</a>.<wbr/>
+Once the camera capture session is enabled it is also recommended to avoid
+changing them from their initial values set in
+<a href="https://developer.android.com/reference/SessionConfiguration.html#setSessionParameters">SessionConfiguration#setSessionParameters</a>.<wbr/>
+Control over session parameters can still be exerted in capture requests
+but clients should be aware and expect delays during their application.<wbr/>
+An example usage scenario could look like this:</p>
+<ul>
+<li>The camera client starts by querying the session parameter key list via
+  <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableSessionKeys">CameraCharacteristics#getAvailableSessionKeys</a>.<wbr/></li>
+<li>Before triggering the capture session create sequence,<wbr/> a capture request
+  must be built via
+  <a href="https://developer.android.com/reference/CameraDevice.html#createCaptureRequest">CameraDevice#createCaptureRequest</a>
+  using an appropriate template matching the particular use case.<wbr/></li>
+<li>The client should go over the list of session parameters and check
+  whether some of the keys listed matches with the parameters that
+  they intend to modify as part of the first capture request.<wbr/></li>
+<li>If there is no such match,<wbr/> the capture request can be passed
+  unmodified to
+  <a href="https://developer.android.com/reference/SessionConfiguration.html#setSessionParameters">SessionConfiguration#setSessionParameters</a>.<wbr/></li>
+<li>If matches do exist,<wbr/> the client should update the respective values
+  and pass the request to
+  <a href="https://developer.android.com/reference/SessionConfiguration.html#setSessionParameters">SessionConfiguration#setSessionParameters</a>.<wbr/></li>
+<li>After the capture session initialization completes the session parameter
+  key list can continue to serve as reference when posting or updating
+  further requests.<wbr/> As mentioned above further changes to session
+  parameters should ideally be avoided,<wbr/> if updates are necessary
+  however clients could expect a delay/<wbr/>glitch during the
+  parameter switch.<wbr/></li>
+</ul>
             </td>
           </tr>
 
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Vendor tags can be listed here.<wbr/> Vendor tag metadata should also
+use the extensions C api (refer to
+android.<wbr/>hardware.<wbr/>camera.<wbr/>device.<wbr/>V3_<wbr/>4.<wbr/>Stream<wbr/>Configuration.<wbr/>session<wbr/>Params for more details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not consume any request tags in the session parameters that
+are not listed either here or in the vendor tag list.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableSessionKeys">CameraCharacteristics#getAvailableSessionKeys</a>.<wbr/></p>
+            </td>
+          </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
-          <tr class="entry" id="dynamic_android.request.id">
+          <tr class="entry" id="static_android.request.availablePhysicalCameraRequestKeys">
             <td class="entry_name
-             " rowspan="1">
-              android.<wbr/>request.<wbr/>id
+             " rowspan="5">
+              android.<wbr/>request.<wbr/>available<wbr/>Physical<wbr/>Camera<wbr/>Request<wbr/>Keys
             </td>
             <td class="entry_type">
                 <span class="entry_type_name">int32</span>
+                <span class="entry_type_container">x</span>
 
+                <span class="entry_type_array">
+                  n
+                </span>
               <span class="entry_type_visibility"> [hidden]</span>
 
 
+              <span class="entry_type_hwlevel">[limited] </span>
 
 
 
@@ -16403,33 +17616,197 @@ frameCount value).<wbr/></p>
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>An application-specified ID for the current
-request.<wbr/> Must be maintained unchanged in output
-frame</p>
+              <p>A subset of the available request keys that can be overridden for
+physical devices backing a logical multi-camera.<wbr/></p>
             </td>
 
             <td class="entry_units">
-              arbitrary integer assigned by application
             </td>
 
             <td class="entry_range">
-              <p>Any int</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
             </td>
 
             <td class="entry_tags">
-              <ul class="entry_tags">
-                  <li><a href="#tag_V1">V1</a></li>
-              </ul>
             </td>
 
           </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This is a subset of <a href="#static_android.request.availableRequestKeys">android.<wbr/>request.<wbr/>available<wbr/>Request<wbr/>Keys</a> which contains a list
+of keys that can be overridden using <a href="https://developer.android.com/reference/CaptureRequest/Builder.html#setPhysicalCameraKey">Builder#setPhysicalCameraKey</a>.<wbr/>
+The respective value of such request key can be obtained by calling
+<a href="https://developer.android.com/reference/CaptureRequest/Builder.html#getPhysicalCameraKey">Builder#getPhysicalCameraKey</a>.<wbr/> Capture requests that contain
+individual physical device requests must be built via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureRequest(int,%20java.util.Set)">CameraDevice#createCaptureRequest(int, Set)</a>.<wbr/></p>
+            </td>
+          </tr>
 
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Vendor tags can be listed here.<wbr/> Vendor tag metadata should also
+use the extensions C api (refer to
+android.<wbr/>hardware.<wbr/>camera.<wbr/>device.<wbr/>V3_<wbr/>4.<wbr/>Capture<wbr/>Request.<wbr/>physical<wbr/>Camera<wbr/>Settings for more
+details).<wbr/></p>
+<p>Setting/<wbr/>getting vendor tags will be checked against the metadata
+vendor extensions API and not against this field.<wbr/></p>
+<p>The HAL must not consume any request tags in the session parameters that
+are not listed either here or in the vendor tag list.<wbr/></p>
+<p>There should be no overlap between this set of keys and the available session keys
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableSessionKeys">CameraCharacteristics#getAvailableSessionKeys</a> along
+with any other controls that can have impact on the dual-camera sync.<wbr/></p>
+<p>The public camera2 API will always make the vendor tags visible
+via
+<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailablePhysicalCameraRequestKeys">CameraCharacteristics#getAvailablePhysicalCameraRequestKeys</a>.<wbr/></p>
+            </td>
+          </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
-                
-          <tr class="entry" id="dynamic_android.request.metadataMode">
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.request.frameCount">
+            <td class="entry_name
+                entry_name_deprecated
+             " rowspan="3">
+              android.<wbr/>request.<wbr/>frame<wbr/>Count
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+              <span class="entry_type_deprecated">[deprecated] </span>
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A frame counter set by the framework.<wbr/> This value monotonically
+increases with every new result (that is,<wbr/> each new result has a unique
+frameCount value).<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              count of frames
+            </td>
+
+            <td class="entry_range">
+              <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
+              <p>&gt; 0</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Reset on release()</p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.id">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>request.<wbr/>id
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">int32</span>
+
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>An application-specified ID for the current
+request.<wbr/> Must be maintained unchanged in output
+frame</p>
+            </td>
+
+            <td class="entry_units">
+              arbitrary integer assigned by application
+            </td>
+
+            <td class="entry_range">
+              <p>Any int</p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+              </ul>
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.request.metadataMode">
             <td class="entry_name
              " rowspan="1">
               android.<wbr/>request.<wbr/>metadata<wbr/>Mode
@@ -16445,7 +17822,7 @@ frame</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_name">NONE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No metadata should be produced on output,<wbr/> except
 for application-bound buffer data.<wbr/> If no
 application-bound streams exist,<wbr/> no frame should be
@@ -16456,7 +17833,7 @@ information.<wbr/> Timestamp information should still be
 included with any output stream buffers</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All metadata should be produced.<wbr/> Statistics will
 only be produced if they are separately
 enabled</p></span>
@@ -16476,6 +17853,10 @@ output</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -16485,7 +17866,7 @@ output</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16527,6 +17908,10 @@ from this capture must be sent to</p>
 created</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -16535,10 +17920,10 @@ created</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If no output streams are listed,<wbr/> then the image
 data should simply be discarded.<wbr/> The image data must
 still be captured for metadata and statistics production,<wbr/>
@@ -16547,7 +17932,7 @@ and the lens and flash must operate as requested.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16582,15 +17967,19 @@ was available to the framework.<wbr/></p>
               <p>&lt;= <a href="#static_android.request.pipelineMaxDepth">android.<wbr/>request.<wbr/>pipeline<wbr/>Max<wbr/>Depth</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Depending on what settings are used in the request,<wbr/> and
 what streams are configured,<wbr/> the data may undergo less processing,<wbr/>
 and some pipeline stages skipped.<wbr/></p>
@@ -16599,16 +17988,16 @@ and some pipeline stages skipped.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value must always represent the accurate count of how many
 pipeline stages were actually used.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -16617,10 +18006,10 @@ pipeline stages were actually used.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_scaler" class="section">scaler</td></tr>
+  <tr><td colspan="7" id="section_scaler" class="section">scaler</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -16629,6 +18018,7 @@ pipeline stages were actually used.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -16678,6 +18068,10 @@ pipeline stages were actually used.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -16686,10 +18080,10 @@ pipeline stages were actually used.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control can be used to implement digital zoom.<wbr/></p>
 <p>The crop region coordinate system is based off
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
@@ -16724,10 +18118,10 @@ result.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The output streams must maintain square pixels at all
 times,<wbr/> no matter what the relative aspect ratios of the
 crop region and the stream are.<wbr/>  Negative values for
@@ -16771,14 +18165,14 @@ hardware limitations.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -16787,6 +18181,7 @@ hardware limitations.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -16824,7 +18219,7 @@ hardware limitations.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">RAW16</span>
+                    <span class="entry_type_enum_name">RAW16 (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_value">0x20</span>
                     <span class="entry_type_enum_notes"><p>RAW16 is a standard,<wbr/> cross-platform format for raw image
@@ -16852,7 +18247,7 @@ only supported output size).<wbr/></p>
 the full set of performance guarantees.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">RAW_OPAQUE</span>
+                    <span class="entry_type_enum_name">RAW_OPAQUE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_value">0x24</span>
                     <span class="entry_type_enum_notes"><p>RAW_<wbr/>OPAQUE (or
@@ -16877,29 +18272,29 @@ any additional conversions or decrease in framerate.<wbr/></p>
 performance guarantees.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">YV12</span>
+                    <span class="entry_type_enum_name">YV12 (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_value">0x32315659</span>
                     <span class="entry_type_enum_notes"><p>YCrCb 4:2:0 Planar</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">YCrCb_420_SP</span>
+                    <span class="entry_type_enum_name">YCrCb_420_SP (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_value">0x11</span>
                     <span class="entry_type_enum_notes"><p>NV21</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">IMPLEMENTATION_DEFINED</span>
+                    <span class="entry_type_enum_name">IMPLEMENTATION_DEFINED (v3.2)</span>
                     <span class="entry_type_enum_value">0x22</span>
                     <span class="entry_type_enum_notes"><p>System internal format,<wbr/> not application-accessible</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">YCbCr_420_888</span>
+                    <span class="entry_type_enum_name">YCbCr_420_888 (v3.2)</span>
                     <span class="entry_type_enum_value">0x23</span>
                     <span class="entry_type_enum_notes"><p>Flexible YUV420 Format</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BLOB</span>
+                    <span class="entry_type_enum_name">BLOB (v3.2)</span>
                     <span class="entry_type_enum_value">0x21</span>
                     <span class="entry_type_enum_notes"><p>JPEG format</p></span>
                   </li>
@@ -16919,6 +18314,10 @@ camera device for output streams.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -16927,20 +18326,20 @@ camera device for output streams.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All camera devices will support JPEG and YUV_<wbr/>420_<wbr/>888 formats.<wbr/></p>
 <p>When set to YUV_<wbr/>420_<wbr/>888,<wbr/> application can access the YUV420 data directly.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>These format values are from HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>* in
 system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h.<wbr/></p>
 <p>When IMPLEMENTATION_<wbr/>DEFINED is used,<wbr/> the platform
@@ -16958,7 +18357,7 @@ for that image buffer.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -16999,6 +18398,10 @@ for each resolution in <a href="#static_android.scaler.availableJpegSizes">andro
               <p>TODO: Remove property.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17007,10 +18410,10 @@ for each resolution in <a href="#static_android.scaler.availableJpegSizes">andro
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This corresponds to the minimum steady-state frame duration when only
 that JPEG stream is active and captured in a burst,<wbr/> with all
 processing (typically in android.<wbr/>*.<wbr/>mode) set to FAST.<wbr/></p>
@@ -17021,7 +18424,7 @@ durations)</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17060,6 +18463,10 @@ durations)</p>
               <p>TODO: Remove property.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17068,27 +18475,27 @@ durations)</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/> All camera devices will support
 sensor maximum resolution (defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>).<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must include sensor maximum resolution
 (defined by <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>),<wbr/>
 and should include half/<wbr/>quarter of sensor maximum resolution.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17124,6 +18531,10 @@ crop region height,<wbr/> for <a href="#controls_android.scaler.cropRegion">andr
               <p>&gt;=1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17132,10 +18543,10 @@ crop region height,<wbr/> for <a href="#controls_android.scaler.cropRegion">andr
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This represents the maximum amount of zooming possible by
 the camera device,<wbr/> or equivalently,<wbr/> the minimum cropping
 window size.<wbr/></p>
@@ -17146,7 +18557,7 @@ allowed size by the camera device.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17187,6 +18598,10 @@ minimum supportable frame duration for that size.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17195,10 +18610,10 @@ minimum supportable frame duration for that size.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This should correspond to the frame duration when only that processed
 stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
 set to FAST.<wbr/></p>
@@ -17208,7 +18623,7 @@ be &gt;= max(individual stream min durations).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17249,6 +18664,10 @@ encoders.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17257,10 +18676,10 @@ encoders.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The resolutions are listed as <code>(width,<wbr/> height)</code> pairs.<wbr/></p>
 <p>For a given use case,<wbr/> the actual maximum supported resolution
 may be lower than what is listed here,<wbr/> depending on the destination
@@ -17274,10 +18693,10 @@ check if it limits the maximum size for image data.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For FULL capability devices (<code><a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL</code>),<wbr/>
 the HAL must include all JPEG sizes listed in <a href="#static_android.scaler.availableJpegSizes">android.<wbr/>scaler.<wbr/>available<wbr/>Jpeg<wbr/>Sizes</a>
 and each below resolution if it is smaller than or equal to the sensor
@@ -17293,7 +18712,7 @@ the HAL only has to list up to the maximum video size supported by the devices.<
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17334,6 +18753,10 @@ supportable frame duration for that size.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -17342,10 +18765,10 @@ supportable frame duration for that size.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Should correspond to the frame duration when only the raw stream is
 active.<wbr/></p>
 <p>When multiple streams are configured,<wbr/> the minimum
@@ -17355,7 +18778,7 @@ durations)</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17395,13 +18818,17 @@ height</p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17433,6 +18860,10 @@ camera device for input streams,<wbr/> to their corresponding output formats.<wb
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -17441,10 +18872,10 @@ camera device for input streams,<wbr/> to their corresponding output formats.<wb
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All camera devices with at least 1
 <a href="#static_android.request.maxNumInputStreams">android.<wbr/>request.<wbr/>max<wbr/>Num<wbr/>Input<wbr/>Streams</a> will have at least one
 available input format.<wbr/></p>
@@ -17492,10 +18923,10 @@ listed as available in this map is not valid.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For the formats,<wbr/> see <code>system/<wbr/>core/<wbr/>include/<wbr/>system/<wbr/>graphics.<wbr/>h</code> for a definition
 of the image format enumerations.<wbr/> The PRIVATE format refers to the
 HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>IMPLEMENTATION_<wbr/>DEFINED format.<wbr/> The HAL could determine
@@ -17536,7 +18967,7 @@ additional formats if it so chooses.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17561,10 +18992,10 @@ additional formats if it so chooses.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OUTPUT</span>
+                    <span class="entry_type_enum_name">OUTPUT (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">INPUT</span>
+                    <span class="entry_type_enum_name">INPUT (v3.2)</span>
                   </li>
                 </ul>
 
@@ -17582,15 +19013,19 @@ camera device supports
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The configurations are listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code>
 tuples.<wbr/></p>
 <p>For a given use case,<wbr/> the actual maximum supported resolution
@@ -17673,10 +19108,10 @@ mandatory stream configurations on a per-capability basis.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>It is recommended (but not mandatory) to also include half/<wbr/>quarter
 of sensor maximum resolution for JPEG formats (regardless of hardware
 level).<wbr/></p>
@@ -17723,7 +19158,7 @@ YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17761,6 +19196,10 @@ format/<wbr/>size combination.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -17769,10 +19208,10 @@ format/<wbr/>size combination.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This should correspond to the frame duration when only that
 stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
 set to either OFF or FAST.<wbr/></p>
@@ -17783,13 +19222,11 @@ is the same regardless of whether the stream is input or output.<wbr/></p>
 <p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
 <a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
 calculating the max frame rate.<wbr/></p>
-<p>(Keep in sync with
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17827,6 +19264,10 @@ output format/<wbr/>size combination.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -17835,10 +19276,10 @@ output format/<wbr/>size combination.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A stall duration is how much extra time would get added
 to the normal minimum frame duration for a repeating request
 that has streams with non-zero stall.<wbr/></p>
@@ -17887,29 +19328,28 @@ ignored).<wbr/></p>
 <ul>
 <li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#YUV_420_888">Image<wbr/>Format#YUV_<wbr/>420_<wbr/>888</a></li>
 <li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW10">ImageFormat#RAW10</a></li>
+<li><a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#RAW12">ImageFormat#RAW12</a></li>
 </ul>
 <p>All other formats may or may not have an allowed stall duration on
 a per-capability basis; refer to <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>
 for more details.<wbr/></p>
 <p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> for more information about
 calculating the max frame rate (absent stalls).<wbr/></p>
-<p>(Keep up to date with
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a> )</p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If possible,<wbr/> it is recommended that all non-JPEG formats
 (such as RAW16) should not have a stall duration.<wbr/> RAW10,<wbr/> RAW12,<wbr/> RAW_<wbr/>OPAQUE
 and IMPLEMENTATION_<wbr/>DEFINED must not have stall durations.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -17944,15 +19384,19 @@ and the stall durations for each format/<wbr/>size combination.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All camera devices will support sensor maximum resolution (defined by
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>) for the JPEG format.<wbr/></p>
 <p>For a given use case,<wbr/> the actual maximum supported resolution
@@ -18048,10 +19492,10 @@ ratio 4:3,<wbr/> and the JPEG encoder alignment requirement is 16,<wbr/> the max
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Do not set this property directly
 (it is synthetic and will not be available at the HAL layer);
 set the <a href="#static_android.scaler.availableStreamConfigurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stream<wbr/>Configurations</a> instead.<wbr/></p>
@@ -18090,7 +19534,7 @@ YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18111,11 +19555,11 @@ YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CENTER_ONLY</span>
+                    <span class="entry_type_enum_name">CENTER_ONLY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device only supports centered crop regions.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FREEFORM</span>
+                    <span class="entry_type_enum_name">FREEFORM (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device supports arbitrarily chosen crop regions.<wbr/></p></span>
                   </li>
                 </ul>
@@ -18132,15 +19576,19 @@ YUV_<wbr/>420_<wbr/>888 must also be available for IMPLEMENTATION_<wbr/>DEFINED.
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When passing a non-centered crop region (<a href="#controls_android.scaler.cropRegion">android.<wbr/>scaler.<wbr/>crop<wbr/>Region</a>) to a camera
 device that only supports CENTER_<wbr/>ONLY cropping,<wbr/> the camera device will move the
 crop region to the center of the sensor active array (<a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>)
@@ -18154,14 +19602,14 @@ return the final used crop region in capture result metadata <a href="#controls_
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -18170,6 +19618,7 @@ return the final used crop region in capture result metadata <a href="#controls_
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -18219,6 +19668,10 @@ return the final used crop region in capture result metadata <a href="#controls_
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -18227,10 +19680,10 @@ return the final used crop region in capture result metadata <a href="#controls_
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This control can be used to implement digital zoom.<wbr/></p>
 <p>The crop region coordinate system is based off
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with <code>(0,<wbr/> 0)</code> being the
@@ -18265,10 +19718,10 @@ result.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The output streams must maintain square pixels at all
 times,<wbr/> no matter what the relative aspect ratios of the
 crop region and the stream are.<wbr/>  Negative values for
@@ -18312,7 +19765,7 @@ hardware limitations.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -18321,10 +19774,10 @@ hardware limitations.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_sensor" class="section">sensor</td></tr>
+  <tr><td colspan="7" id="section_sensor" class="section">sensor</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -18333,6 +19786,7 @@ hardware limitations.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -18379,6 +19833,10 @@ light.<wbr/></p>
               <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -18387,10 +19845,10 @@ light.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
 duration exposed to the nearest possible value (rather than expose longer).<wbr/>
 The final exposure time used will be available in the output capture result.<wbr/></p>
@@ -18400,7 +19858,7 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18432,9 +19890,12 @@ start of next frame exposure.<wbr/></p>
             </td>
 
             <td class="entry_range">
-              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
-is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>.<wbr/>
+The duration is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
             </td>
 
             <td class="entry_tags">
@@ -18445,10 +19906,10 @@ is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The maximum frame rate that can be supported by a camera subsystem is
 a function of many factors:</p>
 <ul>
@@ -18484,54 +19945,45 @@ before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
 can run concurrently to the rest of the camera pipeline,<wbr/> but
 cannot process more than 1 capture at a time.<wbr/></li>
 </ul>
-<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
+<p>The necessary information for the application,<wbr/> given the model above,<wbr/> is provided via
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
-These are used to determine the maximum frame rate /<wbr/> minimum frame
-duration that is possible for a given stream configuration.<wbr/></p>
+These are used to determine the maximum frame rate /<wbr/> minimum frame duration that is
+possible for a given stream configuration.<wbr/></p>
 <p>Specifically,<wbr/> the application can use the following rules to
 determine the minimum frame duration it can request from the camera
 device:</p>
 <ol>
-<li>Let the set of currently configured input/<wbr/>output streams
-be called <code>S</code>.<wbr/></li>
-<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
-it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
-(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
-called <code>F</code>.<wbr/></li>
-<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
-for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
-used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
+<li>Let the set of currently configured input/<wbr/>output streams be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking it up in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
+(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be called <code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed for <code>R</code> is the maximum
+out of all values in <code>F</code>.<wbr/> Let the streams used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
 </ol>
 <p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
-using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
-determines the steady state frame rate that the application will get
-if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
-request be called <code>Rsimple</code>.<wbr/></p>
-<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
-by a single capture of a new request <code>Rstall</code> (which has at least
-one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
-same minimum frame duration this will not cause a frame rate loss
-if all buffers from the previous <code>Rstall</code> have already been
-delivered.<wbr/></p>
-<p>For more details about stalling,<wbr/> see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
+using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code> determines the steady
+state frame rate that the application will get if it uses <code>R</code> as a repeating request.<wbr/> Let
+this special kind of request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
+new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
+<code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
+buffers from the previous <code>Rstall</code> have already been delivered.<wbr/></p>
+<p>For more details about stalling,<wbr/> see <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
 <p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
 OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For more details about stalling,<wbr/> see
 <a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18566,6 +20018,10 @@ before processing.<wbr/></p>
               <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -18574,10 +20030,10 @@ before processing.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
 as defined in ISO 12232:2006.<wbr/></p>
 <p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
@@ -18593,15 +20049,15 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18637,15 +20093,19 @@ when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wb
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
 The camera device then uses the most significant X bits
 that correspond to how many bits are in its Bayer raw sensor
@@ -18656,15 +20116,15 @@ output.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18684,13 +20144,13 @@ output.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
 device returns captures from the image sensor.<wbr/></p>
 <p>This is the default if the key is not set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SOLID_COLOR</span>
+                    <span class="entry_type_enum_name">SOLID_COLOR (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
 respective color channel provided in
 <a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
@@ -18704,7 +20164,7 @@ respective color channel provided in
 are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">COLOR_BARS</span>
+                    <span class="entry_type_enum_name">COLOR_BARS (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
 <p>The vertical bars (left-to-right) are as follows:</p>
 <ul>
@@ -18739,7 +20199,7 @@ pixel array height.<wbr/></p>
 0% intensity or 100% intensity.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
+                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
 each bar should start at its specified color at the top,<wbr/>
 and fade to gray at the bottom.<wbr/></p>
@@ -18755,7 +20215,7 @@ When this is not the case,<wbr/> the pattern should repeat at the bottom
 of the image.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PN9</span>
+                    <span class="entry_type_enum_name">PN9 (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
 generated from a PN9 512-bit sequence (typically implemented
 in hardware with a linear feedback shift register).<wbr/></p>
@@ -18764,7 +20224,7 @@ and thus each subsequent raw frame with this test pattern should
 be exactly the same as the last.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CUSTOM1</span>
+                    <span class="entry_type_enum_name">CUSTOM1 (v3.2)</span>
                     <span class="entry_type_enum_value">256</span>
                     <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
 available only on this camera device are at least this numeric
@@ -18788,15 +20248,19 @@ doing a real exposure from the camera.<wbr/></p>
               <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
 by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
 work as normal.<wbr/></p>
@@ -18808,10 +20272,10 @@ would not actually affect it).<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All test patterns are specified in the Bayer domain.<wbr/></p>
 <p>The HAL may choose to substitute test patterns from the sensor
 with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
@@ -18820,14 +20284,14 @@ sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -18836,6 +20300,7 @@ sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -18888,6 +20353,10 @@ distortion correction has been applied.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -18896,10 +20365,10 @@ distortion correction has been applied.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
 the region that actually receives light from the scene) after any geometric correction
 has been applied,<wbr/> and should be treated as the maximum size in pixels of any of the
@@ -18917,17 +20386,17 @@ resulting in scaling or cropping may have been applied.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
 &gt;= <code>(0,<wbr/>0)</code>.<wbr/>
 The <code>(width,<wbr/> height)</code> must be &lt;= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -18966,6 +20435,10 @@ camera device.<wbr/></p>
               <p>Min &lt;= 100,<wbr/> Max &gt;= 800</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -18975,17 +20448,17 @@ camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The values are the standard ISO sensitivity values,<wbr/>
 as defined in ISO 12232:2006.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19006,19 +20479,19 @@ as defined in ISO 12232:2006.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">RGGB</span>
+                    <span class="entry_type_enum_name">RGGB (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">GRBG</span>
+                    <span class="entry_type_enum_name">GRBG (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">GBRG</span>
+                    <span class="entry_type_enum_name">GBRG (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">BGGR</span>
+                    <span class="entry_type_enum_name">BGGR (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">RGB</span>
+                    <span class="entry_type_enum_name">RGB (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Sensor is not Bayer; output has 3 16-bit
 values for each pixel,<wbr/> instead of just 1 16-bit value
 per pixel.<wbr/></p></span>
@@ -19039,6 +20512,10 @@ the sensor,<wbr/> in reading order.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19048,7 +20525,7 @@ the sensor,<wbr/> in reading order.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19090,6 +20567,10 @@ capability devices (<a href="#static_android.info.supportedHardwareLevel">androi
 the maximum exposure time will be greater than 100ms.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -19099,17 +20580,17 @@ the maximum exposure time will be greater than 100ms.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
 The maximum of the range SHOULD be at least 1 second (1e9),<wbr/> MUST be at least
 100ms.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19145,6 +20626,10 @@ The maximum of the range SHOULD be at least 1 second (1e9),<wbr/> MUST be at lea
 (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/> at least 100ms.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -19153,10 +20638,10 @@ The maximum of the range SHOULD be at least 1 second (1e9),<wbr/> MUST be at lea
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Attempting to use frame durations beyond the maximum will result in the frame
 duration being clipped to the maximum.<wbr/> See that control for a full definition of frame
 durations.<wbr/></p>
@@ -19166,10 +20651,10 @@ for the minimum frame duration values.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For FULL capability devices (<a href="#static_android.info.supportedHardwareLevel">android.<wbr/>info.<wbr/>supported<wbr/>Hardware<wbr/>Level</a> == FULL),<wbr/>
 The maximum of the range SHOULD be at least
 1 second (1e9),<wbr/> MUST be at least 100ms (100e6).<wbr/></p>
@@ -19186,7 +20671,7 @@ for details about encoding this scenario.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19225,6 +20710,10 @@ array.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -19234,25 +20723,25 @@ array.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is the physical size of the sensor pixel
 array defined by <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Needed for FOV calculation for old API</p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19290,6 +20779,10 @@ including black calibration pixels.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19299,18 +20792,18 @@ including black calibration pixels.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The pixel count of the full pixel array of the image sensor,<wbr/> which covers
 <a href="#static_android.sensor.info.physicalSize">android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size</a> area.<wbr/>  This represents the full pixel dimensions of
 the raw buffers produced by this sensor.<wbr/></p>
 <p>If a camera device supports raw sensor formats,<wbr/> either this or
 <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> is the maximum dimensions for the raw
-output formats listed in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> (this depends on
-whether or not the image sensor returns buffers containing pixels that are not
-part of the active array region for blacklevel calibration or other purposes).<wbr/></p>
+output formats listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>
+(this depends on whether or not the image sensor returns buffers containing pixels that
+are not part of the active array region for blacklevel calibration or other purposes).<wbr/></p>
 <p>Some parts of the full pixel array may not receive light from the scene,<wbr/>
 or be otherwise inactive.<wbr/>  The <a href="#static_android.sensor.info.preCorrectionActiveArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pre<wbr/>Correction<wbr/>Active<wbr/>Array<wbr/>Size</a> key
 defines the rectangle of active pixels that will be included in processed image
@@ -19319,7 +20812,7 @@ formats.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19351,6 +20844,10 @@ formats.<wbr/></p>
               <p>&gt; 255 (8-bit output)</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19359,10 +20856,10 @@ formats.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This specifies the fully-saturated encoding level for the raw
 sample values from the sensor.<wbr/>  This is typically caused by the
 sensor becoming highly non-linear or clipping.<wbr/> The minimum for
@@ -19381,17 +20878,17 @@ by the camera device,<wbr/> which provides more accurate white level values.<wbr
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The full bit depth of the sensor must be available in the raw data,<wbr/>
 so the value for linear sensors should not be significantly lower
 than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixel).<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19412,7 +20909,7 @@ than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixe
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_name">UNKNOWN (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in nanoseconds and monotonic,<wbr/>
 but can not be compared to timestamps from other subsystems
 (e.<wbr/>g.<wbr/> accelerometer,<wbr/> gyro etc.<wbr/>),<wbr/> or other instances of the same or different
@@ -19421,7 +20918,7 @@ a single camera instance are comparable,<wbr/> and the timestamps for all buffer
 and the result metadata generated by a single capture are identical.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">REALTIME</span>
+                    <span class="entry_type_enum_name">REALTIME (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Timestamps from <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a> are in the same timebase as
 <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,<wbr/>
 and they can be compared to other timestamps using that base.<wbr/></p></span>
@@ -19440,6 +20937,10 @@ and they can be compared to other timestamps using that base.<wbr/></p></span>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -19448,10 +20949,10 @@ and they can be compared to other timestamps using that base.<wbr/></p></span>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The timestamps provided for captures are always in nanoseconds and monotonic,<wbr/> but
 may not be based on a time source that can be compared to other system time sources.<wbr/></p>
 <p>This characteristic defines the source for the timestamps,<wbr/> and therefore whether they
@@ -19460,22 +20961,22 @@ can be compared against other system time sources/<wbr/>timestamps.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>For camera devices which implement UNKNOWN,<wbr/> the camera framework expects the timestamp
 source to be SYSTEM_<wbr/>TIME_<wbr/>MONOTONIC.<wbr/> For camera devices which implement REALTIME,<wbr/> the camera
 framework expects the timestamp source to be SYSTEM_<wbr/>TIME_<wbr/>BOOTTIME.<wbr/> See
 system/<wbr/>core/<wbr/>include/<wbr/>utils/<wbr/>Timers.<wbr/>h for the definition of SYSTEM_<wbr/>TIME_<wbr/>MONOTONIC and
 SYSTEM_<wbr/>TIME_<wbr/>BOOTTIME.<wbr/> Note that HAL must follow above expectation; otherwise video
 recording might suffer unexpected behavior.<wbr/></p>
-<p>Also,<wbr/> camera devices implements REALTIME must pass the ITS sensor fusion test which
+<p>Also,<wbr/> camera devices which implement REALTIME must pass the ITS sensor fusion test which
 tests the alignment between camera timestamps and gyro sensor timestamps.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19495,10 +20996,10 @@ tests the alignment between camera timestamps and gyro sensor timestamps.<wbr/><
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                   </li>
                 </ul>
 
@@ -19515,15 +21016,19 @@ lens shading correction.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If TRUE,<wbr/> all images produced by the camera device in the RAW image formats will
 have lens shading correction already applied to it.<wbr/> If FALSE,<wbr/> the images will
 not be adjusted for lens shading correction.<wbr/>
@@ -19534,7 +21039,7 @@ Devices with RAW capability will always report this information in this key.<wbr
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19573,6 +21078,10 @@ application of any geometric distortion correction.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19581,10 +21090,10 @@ application of any geometric distortion correction.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is the rectangle representing the size of the active region of the sensor (i.<wbr/>e.<wbr/>
 the region that actually receives light from the scene) before any geometric correction
 has been applied,<wbr/> and should be treated as the active region rectangle for any of the
@@ -19625,7 +21134,7 @@ relative to the top,<wbr/>left of post-processed YUV output buffer with dimensio
 <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>.<wbr/></p>
 <p>The currently supported fields that correct for geometric distortion are:</p>
 <ol>
-<li><a href="#static_android.lens.radialDistortion">android.<wbr/>lens.<wbr/>radial<wbr/>Distortion</a>.<wbr/></li>
+<li><a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a>.<wbr/></li>
 </ol>
 <p>If all of the geometric distortion fields are no-ops,<wbr/> this rectangle will be the same
 as the post-distortion-corrected rectangle given in
@@ -19639,10 +21148,10 @@ full array may include black calibration pixels or other inactive regions.<wbr/>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This array contains <code>(xmin,<wbr/> ymin,<wbr/> width,<wbr/> height)</code>.<wbr/> The <code>(xmin,<wbr/> ymin)</code> must be
 &gt;= <code>(0,<wbr/>0)</code>.<wbr/>
 The <code>(width,<wbr/> height)</code> must be &lt;= <code><a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a></code>.<wbr/></p>
@@ -19652,7 +21161,7 @@ the same as the post-correction active array region given in
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -19675,84 +21184,84 @@ the same as the post-correction active array region given in
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">DAYLIGHT</span>
+                    <span class="entry_type_enum_name">DAYLIGHT (v3.2)</span>
                     <span class="entry_type_enum_value">1</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FLUORESCENT</span>
+                    <span class="entry_type_enum_name">FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_value">2</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TUNGSTEN</span>
+                    <span class="entry_type_enum_name">TUNGSTEN (v3.2)</span>
                     <span class="entry_type_enum_value">3</span>
                     <span class="entry_type_enum_notes"><p>Incandescent light</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FLASH</span>
+                    <span class="entry_type_enum_name">FLASH (v3.2)</span>
                     <span class="entry_type_enum_value">4</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FINE_WEATHER</span>
+                    <span class="entry_type_enum_name">FINE_WEATHER (v3.2)</span>
                     <span class="entry_type_enum_value">9</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CLOUDY_WEATHER</span>
+                    <span class="entry_type_enum_name">CLOUDY_WEATHER (v3.2)</span>
                     <span class="entry_type_enum_value">10</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SHADE</span>
+                    <span class="entry_type_enum_name">SHADE (v3.2)</span>
                     <span class="entry_type_enum_value">11</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">DAYLIGHT_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_value">12</span>
                     <span class="entry_type_enum_notes"><p>D 5700 - 7100K</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">DAY_WHITE_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_value">13</span>
                     <span class="entry_type_enum_notes"><p>N 4600 - 5400K</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">COOL_WHITE_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_value">14</span>
                     <span class="entry_type_enum_notes"><p>W 3900 - 4500K</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">WHITE_FLUORESCENT</span>
+                    <span class="entry_type_enum_name">WHITE_FLUORESCENT (v3.2)</span>
                     <span class="entry_type_enum_value">15</span>
                     <span class="entry_type_enum_notes"><p>WW 3200 - 3700K</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STANDARD_A</span>
+                    <span class="entry_type_enum_name">STANDARD_A (v3.2)</span>
                     <span class="entry_type_enum_value">17</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STANDARD_B</span>
+                    <span class="entry_type_enum_name">STANDARD_B (v3.2)</span>
                     <span class="entry_type_enum_value">18</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">STANDARD_C</span>
+                    <span class="entry_type_enum_name">STANDARD_C (v3.2)</span>
                     <span class="entry_type_enum_value">19</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">D55</span>
+                    <span class="entry_type_enum_name">D55 (v3.2)</span>
                     <span class="entry_type_enum_value">20</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">D65</span>
+                    <span class="entry_type_enum_name">D65 (v3.2)</span>
                     <span class="entry_type_enum_value">21</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">D75</span>
+                    <span class="entry_type_enum_name">D75 (v3.2)</span>
                     <span class="entry_type_enum_value">22</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">D50</span>
+                    <span class="entry_type_enum_name">D50 (v3.2)</span>
                     <span class="entry_type_enum_value">23</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN</span>
+                    <span class="entry_type_enum_name">ISO_STUDIO_TUNGSTEN (v3.2)</span>
                     <span class="entry_type_enum_value">24</span>
                   </li>
                 </ul>
@@ -19772,6 +21281,10 @@ calculating the <a href="#static_android.sensor.colorTransform1">android.<wbr/>s
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19780,10 +21293,10 @@ calculating the <a href="#static_android.sensor.colorTransform1">android.<wbr/>s
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The values in this key correspond to the values defined for the
 EXIF LightSource tag.<wbr/> These illuminants are standard light sources
that are often used when calibrating camera devices.<wbr/></p>
@@ -19797,10 +21310,10 @@ information for improved quality,<wbr/> including
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The first reference illuminant (<a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a>)
 and corresponding matrices must be present to support the RAW capability
 and DNG output.<wbr/></p>
@@ -19822,7 +21335,7 @@ reference illuminant.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19857,6 +21370,10 @@ calculating the <a href="#static_android.sensor.colorTransform2">android.<wbr/>s
               <p>Any value listed in <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19865,10 +21382,10 @@ calculating the <a href="#static_android.sensor.colorTransform2">android.<wbr/>s
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>See <a href="#static_android.sensor.referenceIlluminant1">android.<wbr/>sensor.<wbr/>reference<wbr/>Illuminant1</a> for more details.<wbr/></p>
 <p>If this key is present,<wbr/> then <a href="#static_android.sensor.colorTransform2">android.<wbr/>sensor.<wbr/>color<wbr/>Transform2</a>,<wbr/>
 <a href="#static_android.sensor.calibrationTransform2">android.<wbr/>sensor.<wbr/>calibration<wbr/>Transform2</a>,<wbr/> and
@@ -19877,7 +21394,7 @@ calculating the <a href="#static_android.sensor.colorTransform2">android.<wbr/>s
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19914,6 +21431,10 @@ reference sensor colorspace to the actual device sensor colorspace.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19922,10 +21443,10 @@ reference sensor colorspace to the actual device sensor colorspace.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to correct for per-device variations in the
 sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
 <p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
@@ -19938,7 +21459,7 @@ space under the first reference illuminant
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -19976,6 +21497,10 @@ reference sensor colorspace to the actual device sensor colorspace
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -19984,10 +21509,10 @@ reference sensor colorspace to the actual device sensor colorspace
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to correct for per-device variations in the
 sensor colorspace,<wbr/> and is used for processing raw buffer data.<wbr/></p>
 <p>The matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and
@@ -20002,7 +21527,7 @@ illuminant is present.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20039,6 +21564,10 @@ reference sensor color space.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20047,10 +21576,10 @@ reference sensor color space.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to convert from the standard CIE XYZ color
 space to the reference sensor colorspace,<wbr/> and is used when processing
 raw buffer data.<wbr/></p>
@@ -20067,7 +21596,7 @@ match the standard white point for the first reference illuminant
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20104,6 +21633,10 @@ reference sensor color space.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20112,10 +21645,10 @@ reference sensor color space.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to convert from the standard CIE XYZ color
 space to the reference sensor colorspace,<wbr/> and is used when processing
 raw buffer data.<wbr/></p>
@@ -20134,7 +21667,7 @@ illuminant is present.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20171,6 +21704,10 @@ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20179,10 +21716,10 @@ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
 is used when processing raw buffer data.<wbr/></p>
 <p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
@@ -20197,7 +21734,7 @@ CIE XYZ colorspace.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20234,6 +21771,10 @@ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20242,10 +21783,10 @@ sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This matrix is used to convert to the standard CIE XYZ colorspace,<wbr/> and
 is used when processing raw buffer data.<wbr/></p>
 <p>This matrix is expressed as a 3x3 matrix in row-major-order,<wbr/> and contains
@@ -20262,7 +21803,7 @@ illuminant is present.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20294,6 +21835,10 @@ ISO=100</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -20303,7 +21848,7 @@ ISO=100</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20341,6 +21886,10 @@ ISO=100</p>
               <p>&gt;= 0 for each.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20349,10 +21898,10 @@ ISO=100</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This key specifies the zero light value for each of the CFA mosaic
 channels in the camera sensor.<wbr/>  The maximal value output by the
 sensor is represented by the value in <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>.<wbr/></p>
@@ -20373,16 +21922,16 @@ level values for each frame.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The values are given in row-column scan order,<wbr/> with the first value
 corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20415,6 +21964,10 @@ purely through analog gain.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -20424,10 +21977,10 @@ purely through analog gain.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a> values less than or
 equal to this,<wbr/> all applied gain must be analog.<wbr/> For
 values above this,<wbr/> the gain applied can be a mix of analog and
@@ -20436,7 +21989,7 @@ digital.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20472,6 +22025,10 @@ upright on the device screen in its native orientation.<wbr/></p>
               <p>0,<wbr/> 90,<wbr/> 180,<wbr/> 270</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -20480,17 +22037,17 @@ upright on the device screen in its native orientation.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Also defines the direction of rolling shutter readout,<wbr/> which is from top to bottom in
 the sensor's coordinate system.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20530,6 +22087,10 @@ Saturation &gt;= 2,<wbr/>
 Value &gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -20538,10 +22099,10 @@ Value &gt;= 1</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The number of input samples for the hue,<wbr/> saturation,<wbr/> and value
 dimension of <a href="#dynamic_android.sensor.profileHueSatMap">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map</a>.<wbr/> The order of the
 dimensions given is hue,<wbr/> saturation,<wbr/> value; where hue is the 0th
@@ -20550,7 +22111,7 @@ element.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20588,29 +22149,33 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Defaults to OFF,<wbr/> and always includes OFF if defined.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All custom modes must be &gt;= CUSTOM1.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20646,15 +22211,19 @@ optically shielded black pixel regions.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>In most camera sensors,<wbr/> the active array is surrounded by some
 optically shielded pixel areas.<wbr/> By blocking light,<wbr/> these pixels
 provides a reliable black reference for black level compensation
@@ -20672,10 +22241,10 @@ black level of each captured raw images.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This array contains (xmin,<wbr/> ymin,<wbr/> width,<wbr/> height).<wbr/> The (xmin,<wbr/> ymin)
 must be &gt;= (0,<wbr/>0) and &lt;=
 <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/> The (width,<wbr/> height) must be
@@ -20689,7 +22258,7 @@ multiple rectangles.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20726,15 +22295,19 @@ multiple rectangles.<wbr/></p>
 the camera</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>These configurations are listed as <code>(width,<wbr/> height,<wbr/> size_<wbr/>in_<wbr/>bytes)</code> tuples.<wbr/>
 This is used for sizing the gralloc buffers for opaque RAW buffers.<wbr/>
 All RAW_<wbr/>OPAQUE output stream configuration listed in
@@ -20744,26 +22317,26 @@ this key.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>This key is added in HAL3.<wbr/>4.<wbr/>
-For HAL3.<wbr/>4 or above: devices advertising RAW_<wbr/>OPAQUE format output must list this key.<wbr/>
-For HAL3.<wbr/>3 or earlier devices: if RAW_<wbr/>OPAQUE ouput is advertised,<wbr/> camera framework
-will derive this key by assuming each pixel takes two bytes and no padding bytes
+            <td class="entry_details" colspan="6">
+              <p>This key is added in legacy HAL3.<wbr/>4.<wbr/></p>
+<p>For legacy HAL3.<wbr/>4 or above: devices advertising RAW_<wbr/>OPAQUE format output must list this
+key.<wbr/>  For legacy HAL3.<wbr/>3 or earlier devices: if RAW_<wbr/>OPAQUE output is advertised,<wbr/> camera
+framework will derive this key by assuming each pixel takes two bytes and no padding bytes
 between rows.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -20772,6 +22345,7 @@ between rows.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -20818,6 +22392,10 @@ light.<wbr/></p>
               <p><a href="#static_android.sensor.info.exposureTimeRange">android.<wbr/>sensor.<wbr/>info.<wbr/>exposure<wbr/>Time<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -20826,10 +22404,10 @@ light.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the sensor can't expose this exact duration,<wbr/> it will shorten the
 duration exposed to the nearest possible value (rather than expose longer).<wbr/>
 The final exposure time used will be available in the output capture result.<wbr/></p>
@@ -20839,7 +22417,7 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -20871,9 +22449,12 @@ start of next frame exposure.<wbr/></p>
             </td>
 
             <td class="entry_range">
-              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/>
-<a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a>.<wbr/> The duration
-is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+              <p>See <a href="#static_android.sensor.info.maxFrameDuration">android.<wbr/>sensor.<wbr/>info.<wbr/>max<wbr/>Frame<wbr/>Duration</a>,<wbr/> <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>.<wbr/>
+The duration is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
             </td>
 
             <td class="entry_tags">
@@ -20884,10 +22465,10 @@ is capped to <code>max(duration,<wbr/> exposureTime + overhead)</code>.<wbr/></p
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The maximum frame rate that can be supported by a camera subsystem is
 a function of many factors:</p>
 <ul>
@@ -20923,54 +22504,45 @@ before data is consumed/<wbr/>produced by that stream.<wbr/> These processors
 can run concurrently to the rest of the camera pipeline,<wbr/> but
 cannot process more than 1 capture at a time.<wbr/></li>
 </ul>
-<p>The necessary information for the application,<wbr/> given the model above,<wbr/>
-is provided via the <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> field using
+<p>The necessary information for the application,<wbr/> given the model above,<wbr/> is provided via
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/>
-These are used to determine the maximum frame rate /<wbr/> minimum frame
-duration that is possible for a given stream configuration.<wbr/></p>
+These are used to determine the maximum frame rate /<wbr/> minimum frame duration that is
+possible for a given stream configuration.<wbr/></p>
 <p>Specifically,<wbr/> the application can use the following rules to
 determine the minimum frame duration it can request from the camera
 device:</p>
 <ol>
-<li>Let the set of currently configured input/<wbr/>output streams
-be called <code>S</code>.<wbr/></li>
-<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking
-it up in <a href="#static_android.scaler.streamConfigurationMap">android.<wbr/>scaler.<wbr/>stream<wbr/>Configuration<wbr/>Map</a> using <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
-(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be
-called <code>F</code>.<wbr/></li>
-<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed
-for <code>R</code> is the maximum out of all values in <code>F</code>.<wbr/> Let the streams
-used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
+<li>Let the set of currently configured input/<wbr/>output streams be called <code>S</code>.<wbr/></li>
+<li>Find the minimum frame durations for each stream in <code>S</code>,<wbr/> by looking it up in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>
+(with its respective size/<wbr/>format).<wbr/> Let this set of frame durations be called <code>F</code>.<wbr/></li>
+<li>For any given request <code>R</code>,<wbr/> the minimum frame duration allowed for <code>R</code> is the maximum
+out of all values in <code>F</code>.<wbr/> Let the streams used in <code>R</code> be called <code>S_<wbr/>r</code>.<wbr/></li>
 </ol>
 <p>If none of the streams in <code>S_<wbr/>r</code> have a stall time (listed in <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>
-using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code>
-determines the steady state frame rate that the application will get
-if it uses <code>R</code> as a repeating request.<wbr/> Let this special kind of
-request be called <code>Rsimple</code>.<wbr/></p>
-<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
-by a single capture of a new request <code>Rstall</code> (which has at least
-one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
-same minimum frame duration this will not cause a frame rate loss
-if all buffers from the previous <code>Rstall</code> have already been
-delivered.<wbr/></p>
-<p>For more details about stalling,<wbr/> see
-<a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
+using its respective size/<wbr/>format),<wbr/> then the frame duration in <code>F</code> determines the steady
+state frame rate that the application will get if it uses <code>R</code> as a repeating request.<wbr/> Let
+this special kind of request be called <code>Rsimple</code>.<wbr/></p>
+<p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved by a single capture of a
+new request <code>Rstall</code> (which has at least one in-use stream with a non-0 stall time) and if
+<code>Rstall</code> has the same minimum frame duration this will not cause a frame rate loss if all
+buffers from the previous <code>Rstall</code> have already been delivered.<wbr/></p>
+<p>For more details about stalling,<wbr/> see <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputStallDuration">StreamConfigurationMap#getOutputStallDuration</a>.<wbr/></p>
 <p>This control is only effective if <a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> or <a href="#controls_android.control.mode">android.<wbr/>control.<wbr/>mode</a> is set to
 OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For more details about stalling,<wbr/> see
 <a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a>.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21005,6 +22577,10 @@ before processing.<wbr/></p>
               <p><a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -21013,10 +22589,10 @@ before processing.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The sensitivity is the standard ISO sensitivity value,<wbr/>
 as defined in ISO 12232:2006.<wbr/></p>
 <p>The sensitivity must be within <a href="#static_android.sensor.info.sensitivityRange">android.<wbr/>sensor.<wbr/>info.<wbr/>sensitivity<wbr/>Range</a>,<wbr/> and
@@ -21032,15 +22608,15 @@ OFF; otherwise the auto-exposure algorithm will override this value.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>ISO 12232:2006 REI method is acceptable.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21075,6 +22651,10 @@ row of the image sensor active array,<wbr/> in nanoseconds.<wbr/></p>
               <p>&gt; 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -21083,10 +22663,10 @@ row of the image sensor active array,<wbr/> in nanoseconds.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The timestamps are also included in all image
 buffers produced for the same capture,<wbr/> and will be identical
 on all the outputs.<wbr/></p>
@@ -21107,10 +22687,10 @@ reprocess capture request.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All timestamps must be in reference to the kernel's
 CLOCK_<wbr/>BOOTTIME monotonic clock,<wbr/> which properly accounts for
 time spent asleep.<wbr/> This allows for synchronization with
@@ -21125,7 +22705,7 @@ capture request.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21161,6 +22741,10 @@ somewhere close to it.<wbr/></p>
               <p>Optional.<wbr/> This value is missing if no temperature is available.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -21170,7 +22754,7 @@ somewhere close to it.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21206,6 +22790,10 @@ the time of capture.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21214,10 +22802,10 @@ the time of capture.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value gives the neutral color point encoded as an RGB value in the
 native sensor color space.<wbr/>  The neutral color point indicates the
 currently estimated white point of the scene illumination.<wbr/>  It can be
@@ -21228,7 +22816,7 @@ processing raw sensor data.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21264,6 +22852,10 @@ processing raw sensor data.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21272,10 +22864,10 @@ processing raw sensor data.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This key contains two noise model coefficients for each CFA channel
 corresponding to the sensor amplification (S) and sensor readout
 noise (O).<wbr/>  These are given as pairs of coefficients for each channel
@@ -21297,10 +22889,10 @@ Adobe DNG specification for the NoiseProfile tag.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For a CFA layout of RGGB,<wbr/> the list of coefficients would be given as
 an array of doubles S0,<wbr/>O0,<wbr/>S1,<wbr/>O1,...,<wbr/> where S0 and O0 are the coefficients
 for the red channel,<wbr/> S1 and O1 are the coefficients for the first green
@@ -21308,7 +22900,7 @@ channel,<wbr/> etc.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21349,6 +22941,10 @@ for each pixel.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21357,10 +22953,10 @@ for each pixel.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>hue_<wbr/>samples,<wbr/> saturation_<wbr/>samples,<wbr/> and value_<wbr/>samples are given in
 <a href="#static_android.sensor.profileHueSatMapDimensions">android.<wbr/>sensor.<wbr/>profile<wbr/>Hue<wbr/>Sat<wbr/>Map<wbr/>Dimensions</a>.<wbr/></p>
 <p>Each entry of this map contains three floats corresponding to the
@@ -21374,7 +22970,7 @@ value scale factor of 1.<wbr/>0.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21413,6 +23009,10 @@ value scale factor of 1.<wbr/>0.<wbr/></p>
 sample is required to be <code>(1,<wbr/> 1)</code>.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21421,10 +23021,10 @@ sample is required to be <code>(1,<wbr/> 1)</code>.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This key contains a default tone curve that can be applied while
 processing the image as a starting point for user adjustments.<wbr/>
 The curve is specified as a list of value pairs in linear gamma.<wbr/>
@@ -21433,7 +23033,7 @@ The curve is interpolated using a cubic spline.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21465,6 +23065,10 @@ The curve is interpolated using a cubic spline.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21473,10 +23077,10 @@ The curve is interpolated using a cubic spline.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value is an estimate of the worst case split between the
 Bayer green channels in the red and blue rows in the sensor color
 filter array.<wbr/></p>
@@ -21511,10 +23115,10 @@ a usuable image (&gt;20% divergence).<wbr/></li>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The green split given may be a static value based on prior
 characterization of the camera sensor using the green split
 calculation method given here over a large,<wbr/> representative,<wbr/> sample
@@ -21523,7 +23127,7 @@ results,<wbr/> and can be interpreted in the same manner,<wbr/> may be used.<wbr
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21559,15 +23163,19 @@ when <a href="#controls_android.sensor.testPatternMode">android.<wbr/>sensor.<wb
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Each color channel is treated as an unsigned 32-bit integer.<wbr/>
 The camera device then uses the most significant X bits
 that correspond to how many bits are in its Bayer raw sensor
@@ -21578,15 +23186,15 @@ output.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21606,13 +23214,13 @@ output.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No test pattern mode is used,<wbr/> and the camera
 device returns captures from the image sensor.<wbr/></p>
 <p>This is the default if the key is not set.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SOLID_COLOR</span>
+                    <span class="entry_type_enum_name">SOLID_COLOR (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Each pixel in <code>[R,<wbr/> G_<wbr/>even,<wbr/> G_<wbr/>odd,<wbr/> B]</code> is replaced by its
 respective color channel provided in
 <a href="#controls_android.sensor.testPatternData">android.<wbr/>sensor.<wbr/>test<wbr/>Pattern<wbr/>Data</a>.<wbr/></p>
@@ -21626,7 +23234,7 @@ respective color channel provided in
 are 100% green.<wbr/> All blue pixels are 100% black.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">COLOR_BARS</span>
+                    <span class="entry_type_enum_name">COLOR_BARS (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All pixel data is replaced with an 8-bar color pattern.<wbr/></p>
 <p>The vertical bars (left-to-right) are as follows:</p>
 <ul>
@@ -21661,7 +23269,7 @@ pixel array height.<wbr/></p>
 0% intensity or 100% intensity.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY</span>
+                    <span class="entry_type_enum_name">COLOR_BARS_FADE_TO_GRAY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The test pattern is similar to COLOR_<wbr/>BARS,<wbr/> except that
 each bar should start at its specified color at the top,<wbr/>
 and fade to gray at the bottom.<wbr/></p>
@@ -21677,7 +23285,7 @@ When this is not the case,<wbr/> the pattern should repeat at the bottom
 of the image.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PN9</span>
+                    <span class="entry_type_enum_name">PN9 (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>All pixel data is replaced by a pseudo-random sequence
 generated from a PN9 512-bit sequence (typically implemented
 in hardware with a linear feedback shift register).<wbr/></p>
@@ -21686,7 +23294,7 @@ and thus each subsequent raw frame with this test pattern should
 be exactly the same as the last.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">CUSTOM1</span>
+                    <span class="entry_type_enum_name">CUSTOM1 (v3.2)</span>
                     <span class="entry_type_enum_value">256</span>
                     <span class="entry_type_enum_notes"><p>The first custom test pattern.<wbr/> All custom patterns that are
 available only on this camera device are at least this numeric
@@ -21710,15 +23318,19 @@ doing a real exposure from the camera.<wbr/></p>
               <p><a href="#static_android.sensor.availableTestPatternModes">android.<wbr/>sensor.<wbr/>available<wbr/>Test<wbr/>Pattern<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When a test pattern is enabled,<wbr/> all manual sensor controls specified
 by android.<wbr/>sensor.<wbr/>* will be ignored.<wbr/> All other controls should
 work as normal.<wbr/></p>
@@ -21730,10 +23342,10 @@ would not actually affect it).<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>All test patterns are specified in the Bayer domain.<wbr/></p>
 <p>The HAL may choose to substitute test patterns from the sensor
 with test patterns from on-device memory.<wbr/> In that case,<wbr/> it should be
@@ -21742,7 +23354,7 @@ sensor interconnect bus (such as CSI2) or memory.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21778,6 +23390,10 @@ and the start of last row exposure.<wbr/></p>
 <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -21786,10 +23402,10 @@ and the start of last row exposure.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is the exposure time skew between the first and last
 row exposure start times.<wbr/> The first row and the last row are
 the first and last rows inside of the
@@ -21800,16 +23416,16 @@ to the frame readout time.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The HAL must report <code>0</code> if the sensor is using global shutter,<wbr/> where all pixels begin
 exposure at the same time.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21847,6 +23463,10 @@ arrangement (CFA) mosaic channels.<wbr/></p>
               <p>&gt;= 0 for each.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21855,10 +23475,10 @@ arrangement (CFA) mosaic channels.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Camera sensor black levels may vary dramatically for different
 capture settings (e.<wbr/>g.<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>).<wbr/> The fixed black
 level reported by <a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> may be too
@@ -21881,23 +23501,22 @@ optical black pixels reported by <a href="#static_android.sensor.opticalBlackReg
 layout key (see <a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a>),<wbr/> i.<wbr/>e.<wbr/> the
 nth value given corresponds to the black level offset for the nth
 color channel listed in the CFA.<wbr/></p>
-<p>This key will be available if <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> is
-available or the camera device advertises this key via
-<a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
+<p>This key will be available if <a href="#static_android.sensor.opticalBlackRegions">android.<wbr/>sensor.<wbr/>optical<wbr/>Black<wbr/>Regions</a> is available or the
+camera device advertises this key via <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getAvailableCaptureResultKeys">CameraCharacteristics#getAvailableCaptureResultKeys</a>.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The values are given in row-column scan order,<wbr/> with the first value
 corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -21929,6 +23548,10 @@ corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
               <p>&gt;= 0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -21937,10 +23560,10 @@ corresponding to the element of the CFA in row=0,<wbr/> column=0.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Since the <a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a> may change for different
 capture settings (e.<wbr/>g.,<wbr/> <a href="#controls_android.sensor.sensitivity">android.<wbr/>sensor.<wbr/>sensitivity</a>),<wbr/> the white
 level will change accordingly.<wbr/> This key is similar to
@@ -21953,17 +23576,17 @@ available or the camera device advertises this key via
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The full bit depth of the sensor must be available in the raw data,<wbr/>
 so the value for linear sensors should not be significantly lower
 than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixel).<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -21972,10 +23595,10 @@ than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixe
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_shading" class="section">shading</td></tr>
+  <tr><td colspan="7" id="section_shading" class="section">shading</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -21984,6 +23607,7 @@ than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixe
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -22016,16 +23640,16 @@ than maximum raw value supported,<wbr/> i.<wbr/>e.<wbr/> 2^(sensor bits per pixe
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
 frame rate relative to sensor raw output</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
 cost of possibly reduced frame rate.<wbr/></p></span>
                   </li>
@@ -22045,15 +23669,19 @@ to the image data.<wbr/></p>
               <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
 camera device,<wbr/> and an identity lens shading map data will be provided
 if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
@@ -22081,7 +23709,7 @@ to be converged before using the returned shading map data.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22115,6 +23743,10 @@ applied to the images</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22124,14 +23756,14 @@ applied to the images</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -22140,6 +23772,7 @@ applied to the images</p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -22172,16 +23805,16 @@ applied to the images</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>No lens shading correction is applied.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply lens shading corrections,<wbr/> without slowing
 frame rate relative to sensor raw output</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Apply high-quality lens shading correction,<wbr/> at the
 cost of possibly reduced frame rate.<wbr/></p></span>
                   </li>
@@ -22201,15 +23834,19 @@ to the image data.<wbr/></p>
               <p><a href="#static_android.shading.availableModes">android.<wbr/>shading.<wbr/>available<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to OFF mode,<wbr/> no lens shading correction will be applied by the
 camera device,<wbr/> and an identity lens shading map data will be provided
 if <code><a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a> == ON</code>.<wbr/> For example,<wbr/> for lens
@@ -22237,14 +23874,14 @@ to be converged before using the returned shading map data.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -22253,6 +23890,7 @@ to be converged before using the returned shading map data.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -22302,15 +23940,19 @@ to be converged before using the returned shading map data.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.shading.mode">android.<wbr/>shading.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This list contains lens shading modes that can be set for the camera device.<wbr/>
 Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always
 list OFF and FAST mode.<wbr/> This includes all FULL level devices.<wbr/>
@@ -22319,10 +23961,10 @@ LEGACY devices will always only support FAST mode.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if lens shading correction control is
 available on the camera device,<wbr/> but the underlying implementation can be the same for
 both modes.<wbr/> That is,<wbr/> if the highest quality implementation on the camera device does not
@@ -22330,7 +23972,7 @@ slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -22339,10 +23981,10 @@ slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_statistics" class="section">statistics</td></tr>
+  <tr><td colspan="7" id="section_statistics" class="section">statistics</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -22351,6 +23993,7 @@ slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -22383,17 +24026,17 @@ slow down capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
 results.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SIMPLE</span>
+                    <span class="entry_type_enum_name">SIMPLE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Return all face
 metadata.<wbr/></p>
@@ -22415,6 +24058,10 @@ unit.<wbr/></p>
               <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -22423,10 +24070,10 @@ unit.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Whether face detection is enabled,<wbr/> and whether it
 should output just the basic fields or the full set of
 fields.<wbr/></p>
@@ -22434,10 +24081,10 @@ fields.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
 <a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
 FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
@@ -22445,7 +24092,7 @@ FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">androi
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22465,10 +24112,10 @@ FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">androi
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -22485,6 +24132,10 @@ generation</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22494,7 +24145,7 @@ generation</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22514,10 +24165,10 @@ generation</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -22534,6 +24185,10 @@ generation</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22543,7 +24198,7 @@ generation</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22563,11 +24218,11 @@ generation</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
                   </li>
                 </ul>
@@ -22585,6 +24240,10 @@ generation</p>
               <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -22594,17 +24253,17 @@ generation</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
 If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22625,11 +24284,11 @@ If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
                   </li>
                 </ul>
@@ -22648,6 +24307,10 @@ shading map in output result metadata.<wbr/></p>
               <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -22656,10 +24319,10 @@ shading map in output result metadata.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to ON,<wbr/>
 <a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
 the output result metadata.<wbr/></p>
@@ -22668,14 +24331,67 @@ the output result metadata.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.statistics.oisDataMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>ois<wbr/>Data<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Do not include OIS data in the capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Include OIS data in the capture result.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A control for selecting whether OIS position information is included in output
+result metadata.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableOisDataModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Ois<wbr/>Data<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -22684,6 +24400,7 @@ the output result metadata.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -22736,21 +24453,25 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>OFF is always supported.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22783,6 +24504,10 @@ supported</p>
               <p>&gt;= 64</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22792,7 +24517,7 @@ supported</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22828,6 +24553,10 @@ faces.<wbr/></p>
 <code>&gt;0</code> for LEGACY devices.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -22837,7 +24566,7 @@ faces.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22869,6 +24598,10 @@ bucket</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22878,7 +24611,7 @@ bucket</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22910,6 +24643,10 @@ region.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22919,7 +24656,7 @@ region.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -22957,6 +24694,10 @@ map</p>
               <p>Must be at least 32 x 32</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -22966,7 +24707,7 @@ map</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23004,6 +24745,10 @@ supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.statistics.hotPixelMapMode">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -23013,10 +24758,10 @@ supported by this camera device.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If no hotpixel map output is available for this camera device,<wbr/> this will contain only
 <code>false</code>.<wbr/></p>
 <p>ON is always supported on devices with the RAW capability.<wbr/></p>
@@ -23024,7 +24769,7 @@ supported by this camera device.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23062,15 +24807,19 @@ are supported by this camera device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.statistics.lensShadingMapMode">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map<wbr/>Mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If no lens shading map output is available for this camera device,<wbr/> this key will
 contain only OFF.<wbr/></p>
 <p>ON is always supported on devices with the RAW capability.<wbr/>
@@ -23079,7 +24828,64 @@ LEGACY mode devices will always only support OFF.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.statistics.info.availableOisDataModes">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Ois<wbr/>Data<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of OIS data output modes for <a href="#controls_android.statistics.oisDataMode">android.<wbr/>statistics.<wbr/>ois<wbr/>Data<wbr/>Mode</a> that
+are supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.statistics.oisDataMode">android.<wbr/>statistics.<wbr/>ois<wbr/>Data<wbr/>Mode</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>If no OIS data output is available for this camera device,<wbr/> this key will
+contain only OFF.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -23089,7 +24895,7 @@ LEGACY mode devices will always only support OFF.<wbr/></p>
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -23098,6 +24904,7 @@ LEGACY mode devices will always only support OFF.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -23130,17 +24937,17 @@ LEGACY mode devices will always only support OFF.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not include face detection statistics in capture
 results.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">SIMPLE</span>
+                    <span class="entry_type_enum_name">SIMPLE (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Return face rectangle and confidence values only.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_optional">[optional]</span>
                     <span class="entry_type_enum_notes"><p>Return all face
 metadata.<wbr/></p>
@@ -23162,6 +24969,10 @@ unit.<wbr/></p>
               <p><a href="#static_android.statistics.info.availableFaceDetectModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Face<wbr/>Detect<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -23170,10 +24981,10 @@ unit.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Whether face detection is enabled,<wbr/> and whether it
 should output just the basic fields or the full set of
 fields.<wbr/></p>
@@ -23181,10 +24992,10 @@ fields.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>SIMPLE mode must fill in <a href="#dynamic_android.statistics.faceRectangles">android.<wbr/>statistics.<wbr/>face<wbr/>Rectangles</a> and
 <a href="#dynamic_android.statistics.faceScores">android.<wbr/>statistics.<wbr/>face<wbr/>Scores</a>.<wbr/>
 FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">android.<wbr/>statistics.<wbr/>face<wbr/>Ids</a>,<wbr/> and
@@ -23192,7 +25003,7 @@ FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">androi
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23228,6 +25039,10 @@ FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">androi
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -23236,10 +25051,10 @@ FULL mode must also fill in <a href="#dynamic_android.statistics.faceIds">androi
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Each detected face is given a unique ID that is valid for as long as the face is visible
 to the camera device.<wbr/>  A face that leaves the field of view and later returns may be
 assigned a new ID.<wbr/></p>
@@ -23248,7 +25063,7 @@ assigned a new ID.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23286,6 +25101,10 @@ faces.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -23294,10 +25113,10 @@ faces.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
 <code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
 <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> == FULL</p>
@@ -23305,7 +25124,7 @@ faces.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23343,6 +25162,10 @@ faces.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -23351,10 +25174,10 @@ faces.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The coordinate system is that of <a href="#static_android.sensor.info.activeArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>active<wbr/>Array<wbr/>Size</a>,<wbr/> with
 <code>(0,<wbr/> 0)</code> being the top-left pixel of the active array.<wbr/></p>
 <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF</p>
@@ -23362,7 +25185,7 @@ faces.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23400,6 +25223,10 @@ detected faces</p>
               <p>1-100</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_BC">BC</a></li>
@@ -23408,25 +25235,25 @@ detected faces</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> != OFF.<wbr/></p>
             </td>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The value should be meaningful (for example,<wbr/> setting 100 at
 all times is illegal).<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23464,21 +25291,25 @@ in this capture.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Only available if <a href="#controls_android.statistics.faceDetectMode">android.<wbr/>statistics.<wbr/>face<wbr/>Detect<wbr/>Mode</a> <code>!=</code> OFF.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23515,6 +25346,10 @@ sensor data</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -23523,10 +25358,10 @@ sensor data</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The k'th bucket (0-based) covers the input range
 (with w = <a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a>) of [ k * w/<wbr/>N,<wbr/>
 (k + 1) * w /<wbr/> N ).<wbr/> If only a monochrome sharpness map is
@@ -23535,7 +25370,7 @@ supported,<wbr/> all channels should have the same data</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23555,10 +25390,10 @@ supported,<wbr/> all channels should have the same data</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -23575,6 +25410,10 @@ generation</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -23584,7 +25423,7 @@ generation</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23621,6 +25460,10 @@ sensor data</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -23629,17 +25472,17 @@ sensor data</p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If only a monochrome sharpness map is supported,<wbr/>
 all channels should have the same data</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23659,10 +25502,10 @@ all channels should have the same data</p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -23679,6 +25522,10 @@ generation</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_FUTURE">FUTURE</a></li>
@@ -23688,7 +25535,7 @@ generation</p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23723,15 +25570,19 @@ Bayer color channel.<wbr/></p>
               <p>Each gain factor is &gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The map provided here is the same map that is used by the camera device to
 correct both color shading and vignetting for output non-RAW images.<wbr/></p>
 <p>When there is no lens shading correction applied to RAW
@@ -23783,7 +25634,7 @@ image of a gray wall (using bicubic interpolation for visual quality) as capture
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23823,15 +25674,19 @@ for each Bayer color channel of RAW image data.<wbr/></p>
               <p>Each gain factor is &gt;= 1</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The map provided here is the same map that is used by the camera device to
 correct both color shading and vignetting for output non-RAW images.<wbr/></p>
 <p>When there is no lens shading correction applied to RAW
@@ -23896,10 +25751,10 @@ formats.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The lens shading map calculation may depend on exposure and white balance statistics.<wbr/>
 When AE and AWB are in AUTO modes
 (<a href="#controls_android.control.aeMode">android.<wbr/>control.<wbr/>ae<wbr/>Mode</a> <code>!=</code> OFF and <a href="#controls_android.control.awbMode">android.<wbr/>control.<wbr/>awb<wbr/>Mode</a> <code>!=</code> OFF),<wbr/> the HAL
@@ -23912,7 +25767,7 @@ the latest known good map generated when the AE and AWB are in AUTO modes.<wbr/>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -23952,15 +25807,19 @@ by the camera device's statistics units for the current output frame.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This may be different than the gains used for this frame,<wbr/>
 since statistics processing on data from a new frame
 typically completes after the transform has already been
@@ -23973,7 +25832,7 @@ regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24014,15 +25873,19 @@ output frame.<wbr/></p>
               <p><span class="entry_range_deprecated">Deprecated</span>. Do not use.</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The camera device will provide the estimate from its
 statistics unit on the white balance transforms to use
 for the next frame.<wbr/> These are the values the camera device believes
@@ -24039,7 +25902,7 @@ regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24060,17 +25923,17 @@ regardless of the android.<wbr/>control.<wbr/>* current values.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">NONE</span>
+                    <span class="entry_type_enum_name">NONE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device does not detect any flickering illumination
 in the current scene.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">50HZ</span>
+                    <span class="entry_type_enum_name">50HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 50Hz
 in the current scene.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">60HZ</span>
+                    <span class="entry_type_enum_name">60HZ (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>The camera device detects illumination flickering at 60Hz
 in the current scene.<wbr/></p></span>
                   </li>
@@ -24089,15 +25952,19 @@ frequency.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Many light sources,<wbr/> such as most fluorescent lights,<wbr/> flicker at a rate
 that depends on the local utility power standards.<wbr/> This flicker must be
 accounted for by auto-exposure routines to avoid artifacts in captured images.<wbr/>
@@ -24115,7 +25982,7 @@ into this metadata field.<wbr/> See
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24135,11 +26002,11 @@ into this metadata field.<wbr/> See
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel map production is disabled.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Hot pixel map production is enabled.<wbr/></p></span>
                   </li>
                 </ul>
@@ -24157,6 +26024,10 @@ into this metadata field.<wbr/> See
               <p><a href="#static_android.statistics.info.availableHotPixelMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Hot<wbr/>Pixel<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -24166,17 +26037,17 @@ into this metadata field.<wbr/> See
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If set to <code>true</code>,<wbr/> a hot pixel map is returned in <a href="#dynamic_android.statistics.hotPixelMap">android.<wbr/>statistics.<wbr/>hot<wbr/>Pixel<wbr/>Map</a>.<wbr/>
 If set to <code>false</code>,<wbr/> no hot pixel map will be returned.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24215,6 +26086,10 @@ The <code>(x,<wbr/> y)</code> coordinates must be bounded by
 <a href="#static_android.sensor.info.pixelArraySize">android.<wbr/>sensor.<wbr/>info.<wbr/>pixel<wbr/>Array<wbr/>Size</a>.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -24224,10 +26099,10 @@ The <code>(x,<wbr/> y)</code> coordinates must be bounded by
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A coordinate <code>(x,<wbr/> y)</code> must lie between <code>(0,<wbr/> 0)</code>,<wbr/> and
 <code>(width - 1,<wbr/> height - 1)</code> (inclusive),<wbr/> which are the top-left and
 bottom-right of the pixel array,<wbr/> respectively.<wbr/> The width and
@@ -24238,10 +26113,10 @@ bounds given by <a href="#static_android.sensor.info.activeArraySize">android.<w
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A hotpixel map contains the coordinates of pixels on the camera
 sensor that do report valid values (usually due to defects in
 the camera sensor).<wbr/> This includes pixels that are stuck at certain
@@ -24252,7 +26127,7 @@ pixels than actual pixels on the camera sensor.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24273,11 +26148,11 @@ pixels than actual pixels on the camera sensor.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Do not include a lens shading map in the capture result.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Include a lens shading map in the capture result.<wbr/></p></span>
                   </li>
                 </ul>
@@ -24296,6 +26171,10 @@ shading map in output result metadata.<wbr/></p>
               <p><a href="#static_android.statistics.info.availableLensShadingMapModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Lens<wbr/>Shading<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_RAW">RAW</a></li>
@@ -24304,10 +26183,10 @@ shading map in output result metadata.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to ON,<wbr/>
 <a href="#dynamic_android.statistics.lensShadingMap">android.<wbr/>statistics.<wbr/>lens<wbr/>Shading<wbr/>Map</a> will be provided in
 the output result metadata.<wbr/></p>
@@ -24316,183 +26195,211 @@ the output result metadata.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
-        
+                
+          <tr class="entry" id="dynamic_android.statistics.oisDataMode">
+            <td class="entry_name
+             " rowspan="1">
+              android.<wbr/>statistics.<wbr/>ois<wbr/>Data<wbr/>Mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
 
-      <!-- end of kind -->
-      </tbody>
+              <span class="entry_type_visibility"> [public]</span>
 
-  <!-- end of section -->
-  <tr><td colspan="6" id="section_tonemap" class="section">tonemap</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
 
-      <thead class="entries_header">
-        <tr>
-          <th class="th_name">Property Name</th>
-          <th class="th_type">Type</th>
-          <th class="th_description">Description</th>
-          <th class="th_units">Units</th>
-          <th class="th_range">Range</th>
-          <th class="th_tags">Tags</th>
-        </tr>
-      </thead>
 
-      <tbody>
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Do not include OIS data in the capture result.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">ON (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Include OIS data in the capture result.<wbr/></p></span>
+                  </li>
+                </ul>
 
-        
+            </td> <!-- entry_type -->
 
-        
+            <td class="entry_description">
+              <p>A control for selecting whether OIS position information is included in output
+result metadata.<wbr/></p>
+            </td>
 
-        
+            <td class="entry_units">
+            </td>
 
-        
+            <td class="entry_range">
+              <p><a href="#static_android.statistics.info.availableOisDataModes">android.<wbr/>statistics.<wbr/>info.<wbr/>available<wbr/>Ois<wbr/>Data<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
 
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
                 
-          <tr class="entry" id="controls_android.tonemap.curveBlue">
+          <tr class="entry" id="dynamic_android.statistics.oisTimestamps">
             <td class="entry_name
              " rowspan="3">
-              android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
+              android.<wbr/>statistics.<wbr/>ois<wbr/>Timestamps
             </td>
             <td class="entry_type">
-                <span class="entry_type_name">float</span>
+                <span class="entry_type_name">int64</span>
                 <span class="entry_type_container">x</span>
 
                 <span class="entry_type_array">
-                  n x 2
+                  n
                 </span>
               <span class="entry_type_visibility"> [ndk_public]</span>
 
 
-              <span class="entry_type_hwlevel">[full] </span>
 
 
-                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
 
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
+              <p>An array of timestamps of OIS samples,<wbr/> in nanoseconds.<wbr/></p>
             </td>
 
             <td class="entry_units">
+              nanoseconds
             </td>
 
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            <td class="entry_details" colspan="6">
+              <p>The array contains the timestamps of OIS samples.<wbr/> The timestamps are in the same
+timebase as and comparable to <a href="#dynamic_android.sensor.timestamp">android.<wbr/>sensor.<wbr/>timestamp</a>.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
-          <tr class="entry" id="controls_android.tonemap.curveGreen">
+          <tr class="entry" id="dynamic_android.statistics.oisXShifts">
             <td class="entry_name
              " rowspan="3">
-              android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
+              android.<wbr/>statistics.<wbr/>ois<wbr/>XShifts
             </td>
             <td class="entry_type">
                 <span class="entry_type_name">float</span>
                 <span class="entry_type_container">x</span>
 
                 <span class="entry_type_array">
-                  n x 2
+                  n
                 </span>
               <span class="entry_type_visibility"> [ndk_public]</span>
 
 
-              <span class="entry_type_hwlevel">[full] </span>
 
 
-                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
 
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
+              <p>An array of shifts of OIS samples,<wbr/> in x direction.<wbr/></p>
             </td>
 
             <td class="entry_units">
+              Pixels in active array.<wbr/>
             </td>
 
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            <td class="entry_details" colspan="6">
+              <p>The array contains the amount of shifts in x direction,<wbr/> in pixels,<wbr/> based on OIS samples.<wbr/>
+A positive value is a shift from left to right in active array coordinate system.<wbr/> For
+example,<wbr/> if the optical center is (1000,<wbr/> 500) in active array coordinates,<wbr/> a shift of
+(3,<wbr/> 0) puts the new optical center at (1003,<wbr/> 500).<wbr/></p>
+<p>The number of shifts must match the number of timestamps in
+<a href="#dynamic_android.statistics.oisTimestamps">android.<wbr/>statistics.<wbr/>ois<wbr/>Timestamps</a>.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
-          <tr class="entry" id="controls_android.tonemap.curveRed">
+          <tr class="entry" id="dynamic_android.statistics.oisYShifts">
             <td class="entry_name
-             " rowspan="5">
-              android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>ois<wbr/>YShifts
             </td>
             <td class="entry_type">
                 <span class="entry_type_name">float</span>
                 <span class="entry_type_container">x</span>
 
                 <span class="entry_type_array">
-                  n x 2
+                  n
                 </span>
               <span class="entry_type_visibility"> [ndk_public]</span>
 
 
-              <span class="entry_type_hwlevel">[full] </span>
 
 
-                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
 
 
             </td> <!-- entry_type -->
 
             <td class="entry_description">
-              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
-channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
-CONTRAST_<wbr/>CURVE.<wbr/></p>
+              <p>An array of shifts of OIS samples,<wbr/> in y direction.<wbr/></p>
             </td>
 
             <td class="entry_units">
+              Pixels in active array.<wbr/>
             </td>
 
             <td class="entry_range">
-              <p>0-1 on both input and output coordinates,<wbr/> normalized
-as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
             </td>
 
             <td class="entry_tags">
@@ -24500,15 +26407,287 @@ as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>Each channel's curve is defined by an array of control points:</p>
-<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
-  [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
-2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
-<p>These are sorted in order of increasing <code>Pin</code>; it is
+            <td class="entry_details" colspan="6">
+              <p>The array contains the amount of shifts in y direction,<wbr/> in pixels,<wbr/> based on OIS samples.<wbr/>
+A positive value is a shift from top to bottom in active array coordinate system.<wbr/> For
+example,<wbr/> if the optical center is (1000,<wbr/> 500) in active array coordinates,<wbr/> a shift of
+(0,<wbr/> 5) puts the new optical center at (1000,<wbr/> 505).<wbr/></p>
+<p>The number of shifts must match the number of timestamps in
+<a href="#dynamic_android.statistics.oisTimestamps">android.<wbr/>statistics.<wbr/>ois<wbr/>Timestamps</a>.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="dynamic_android.statistics.oisSamples">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>statistics.<wbr/>ois<wbr/>Samples
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [java_public as oisSample]</span>
+
+              <span class="entry_type_synthetic">[synthetic] </span>
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>An array of OIS samples.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Each OIS sample contains the timestamp and the amount of shifts in x and y direction,<wbr/>
+in pixels,<wbr/> of the OIS sample.<wbr/></p>
+<p>A positive value for a shift in x direction is a shift from left to right in active array
+coordinate system.<wbr/> For example,<wbr/> if the optical center is (1000,<wbr/> 500) in active array
+coordinates,<wbr/> a shift of (3,<wbr/> 0) puts the new optical center at (1003,<wbr/> 500).<wbr/></p>
+<p>A positive value for a shift in y direction is a shift from top to bottom in active array
+coordinate system.<wbr/> For example,<wbr/> if the optical center is (1000,<wbr/> 500) in active array
+coordinates,<wbr/> a shift of (0,<wbr/> 5) puts the new optical center at (1000,<wbr/> 505).<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="7" id="section_tonemap" class="section">tonemap</td></tr>
+
+
+      <tr><td colspan="7" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.tonemap.curveBlue">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Blue
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [ndk_public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the blue
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.curveGreen">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Green
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [ndk_public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the green
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="controls_android.tonemap.curveRed">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>tonemap.<wbr/>curve<wbr/>Red
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">float</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n x 2
+                </span>
+              <span class="entry_type_visibility"> [ndk_public]</span>
+
+
+              <span class="entry_type_hwlevel">[full] </span>
+
+
+                <div class="entry_type_notes">1D array of float pairs (P_<wbr/>IN,<wbr/> P_<wbr/>OUT).<wbr/> The maximum number of pairs is specified by android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points.<wbr/></div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Tonemapping /<wbr/> contrast /<wbr/> gamma curve for the red
+channel,<wbr/> to use when <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> is
+CONTRAST_<wbr/>CURVE.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>0-1 on both input and output coordinates,<wbr/> normalized
+as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>Each channel's curve is defined by an array of control points:</p>
+<pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
+  [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
+2 &lt;= N &lt;= <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a></code></pre>
+<p>These are sorted in order of increasing <code>Pin</code>; it is
 required that input values 0.<wbr/>0 and 1.<wbr/>0 are included in the list to
 define a complete mapping.<wbr/> For input values between control points,<wbr/>
 the camera device must linearly interpolate between the control
@@ -24517,6 +26696,8 @@ points.<wbr/></p>
 of points can be less than max (that is,<wbr/> the request doesn't have to
 always provide a curve with number of points equivalent to
 <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>For devices with MONOCHROME capability,<wbr/> only red channel is used.<wbr/> Green and blue channels
+are ignored.<wbr/></p>
 <p>A few examples,<wbr/> and their corresponding graphical mappings; these
 only specify the red channel and the precision is limited to 4
 digits,<wbr/> for conciseness.<wbr/></p>
@@ -24548,10 +26729,10 @@ digits,<wbr/> for conciseness.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For good quality of mapping,<wbr/> at least 128 control points are
 preferred.<wbr/></p>
 <p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
@@ -24559,7 +26740,7 @@ control points used as are available.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24593,15 +26774,19 @@ is CONTRAST_<wbr/>CURVE.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The tonemapCurve consists of three curves for each of red,<wbr/> green,<wbr/> and blue
 channels respectively.<wbr/> The following example uses the red channel as an
 example.<wbr/> The same logic applies to green and blue channel.<wbr/>
@@ -24618,6 +26803,8 @@ points.<wbr/></p>
 of points can be less than max (that is,<wbr/> the request doesn't have to
 always provide a curve with number of points equivalent to
 <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>For devices with MONOCHROME capability,<wbr/> only red channel is used.<wbr/> Green and blue channels
+are ignored.<wbr/></p>
 <p>A few examples,<wbr/> and their corresponding graphical mappings; these
 only specify the red channel and the precision is limited to 4
 digits,<wbr/> for conciseness.<wbr/></p>
@@ -24649,16 +26836,16 @@ digits,<wbr/> for conciseness.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
 curveBlue entries.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24679,7 +26866,7 @@ curveBlue entries.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CONTRAST_CURVE</span>
+                    <span class="entry_type_enum_name">CONTRAST_CURVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
 the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -24689,17 +26876,17 @@ for applying the tonemapping curve specified by
 sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
 reducing frame rate compared to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
 the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">GAMMA_VALUE</span>
+                    <span class="entry_type_enum_name">GAMMA_VALUE (v3.2)</span>
                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
 tonemapping.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -24707,7 +26894,7 @@ for applying the tonemapping curve specified by <a href="#controls_android.tonem
 <p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PRESET_CURVE</span>
+                    <span class="entry_type_enum_name">PRESET_CURVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
<a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -24730,15 +26917,19 @@ for applying the tonemapping curve specified by
               <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When switching to an application-defined contrast curve by setting
 <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
 per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
@@ -24761,7 +26952,7 @@ roughly the same.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24793,15 +26984,19 @@ GAMMA_<wbr/>VALUE</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The tonemap curve will be defined by the following formula:
 * OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
 where IN and OUT is the input pixel value scaled to range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
@@ -24816,7 +27011,7 @@ within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -24836,11 +27031,11 @@ within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">SRGB</span>
+                    <span class="entry_type_enum_name">SRGB (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">REC709</span>
+                    <span class="entry_type_enum_name">REC709 (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
                   </li>
                 </ul>
@@ -24858,15 +27053,19 @@ PRESET_<wbr/>CURVE</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The tonemap curve will be defined by specified standard.<wbr/></p>
 <p>sRGB (approximated by 16 control points):</p>
 <p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
@@ -24878,14 +27077,14 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -24894,6 +27093,7 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -24938,15 +27138,19 @@ tonemap curve that can be used for <a href="#controls_android.tonemap.curve">and
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If the actual number of points provided by the application (in <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>*) is
 less than this maximum,<wbr/> the camera device will resample the curve to its internal
 representation,<wbr/> using linear interpolation.<wbr/></p>
@@ -24957,15 +27161,15 @@ hardware curves used as closely as possible when linearly interpolated.<wbr/></p
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This value must be at least 64.<wbr/> This should be at least 128.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25004,15 +27208,19 @@ device.<wbr/></p>
               <p>Any value listed in <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Camera devices that support the MANUAL_<wbr/>POST_<wbr/>PROCESSING capability will always contain
 at least one of below mode combinations:</p>
 <ul>
@@ -25024,10 +27232,10 @@ at least one of below mode combinations:</p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if automatic tonemap control is available
 on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
 That is,<wbr/> if the highest quality implementation on the camera device does not slow down
@@ -25035,14 +27243,14 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -25051,6 +27259,7 @@ capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same outp
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -25101,21 +27310,25 @@ CONTRAST_<wbr/>CURVE.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25154,21 +27367,25 @@ CONTRAST_<wbr/>CURVE.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>See <a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> for more details.<wbr/></p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25209,15 +27426,19 @@ CONTRAST_<wbr/>CURVE.<wbr/></p>
 as a floating-point value such that 0 == black and 1 == white.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Each channel's curve is defined by an array of control points:</p>
 <pre><code><a href="#controls_android.tonemap.curveRed">android.<wbr/>tonemap.<wbr/>curve<wbr/>Red</a> =
   [ P0in,<wbr/> P0out,<wbr/> P1in,<wbr/> P1out,<wbr/> P2in,<wbr/> P2out,<wbr/> P3in,<wbr/> P3out,<wbr/> ...,<wbr/> PNin,<wbr/> PNout ]
@@ -25231,6 +27452,8 @@ points.<wbr/></p>
 of points can be less than max (that is,<wbr/> the request doesn't have to
 always provide a curve with number of points equivalent to
 <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>For devices with MONOCHROME capability,<wbr/> only red channel is used.<wbr/> Green and blue channels
+are ignored.<wbr/></p>
 <p>A few examples,<wbr/> and their corresponding graphical mappings; these
 only specify the red channel and the precision is limited to 4
 digits,<wbr/> for conciseness.<wbr/></p>
@@ -25262,10 +27485,10 @@ digits,<wbr/> for conciseness.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For good quality of mapping,<wbr/> at least 128 control points are
 preferred.<wbr/></p>
 <p>A typical use case of this would be a gamma-1/<wbr/>2.<wbr/>2 curve,<wbr/> with as many
@@ -25273,7 +27496,7 @@ control points used as are available.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25307,15 +27530,19 @@ is CONTRAST_<wbr/>CURVE.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The tonemapCurve consists of three curves for each of red,<wbr/> green,<wbr/> and blue
 channels respectively.<wbr/> The following example uses the red channel as an
example.<wbr/> The same logic applies to the green and blue channels.<wbr/>
@@ -25332,6 +27559,8 @@ points.<wbr/></p>
 of points can be less than max (that is,<wbr/> the request doesn't have to
 always provide a curve with number of points equivalent to
 <a href="#static_android.tonemap.maxCurvePoints">android.<wbr/>tonemap.<wbr/>max<wbr/>Curve<wbr/>Points</a>).<wbr/></p>
+<p>For devices with MONOCHROME capability,<wbr/> only the red channel is used.<wbr/> Green and blue channels
+are ignored.<wbr/></p>
 <p>A few examples,<wbr/> and their corresponding graphical mappings; these
 only specify the red channel and the precision is limited to 4
 digits,<wbr/> for conciseness.<wbr/></p>
@@ -25363,16 +27592,16 @@ digits,<wbr/> for conciseness.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This entry is created by the framework from the curveRed,<wbr/> curveGreen and
 curveBlue entries.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25393,7 +27622,7 @@ curveBlue entries.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CONTRAST_CURVE</span>
+                    <span class="entry_type_enum_name">CONTRAST_CURVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the tone mapping curve specified in
 the <a href="#controls_android.tonemap.curve">android.<wbr/>tonemap.<wbr/>curve</a>* entries.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -25403,17 +27632,17 @@ for applying the tonemapping curve specified by
 sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FAST</span>
+                    <span class="entry_type_enum_name">FAST (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Advanced gamma mapping and color enhancement may be applied,<wbr/> without
 reducing frame rate compared to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">HIGH_QUALITY</span>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>High-quality gamma mapping and color enhancement will be applied,<wbr/> at
 the cost of possibly reduced frame rate compared to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">GAMMA_VALUE</span>
+                    <span class="entry_type_enum_name">GAMMA_VALUE (v3.2)</span>
                    <span class="entry_type_enum_notes"><p>Use the gamma value specified in <a href="#controls_android.tonemap.gamma">android.<wbr/>tonemap.<wbr/>gamma</a> to perform
 tonemapping.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -25421,7 +27650,7 @@ for applying the tonemapping curve specified by <a href="#controls_android.tonem
 <p>Must not slow down frame rate relative to raw sensor output.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">PRESET_CURVE</span>
+                    <span class="entry_type_enum_name">PRESET_CURVE (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Use the preset tonemapping curve specified in
 <a href="#controls_android.tonemap.presetCurve">android.<wbr/>tonemap.<wbr/>preset<wbr/>Curve</a> to perform tonemapping.<wbr/></p>
 <p>All color enhancement and tonemapping must be disabled,<wbr/> except
@@ -25444,15 +27673,19 @@ for applying the tonemapping curve specified by
               <p><a href="#static_android.tonemap.availableToneMapModes">android.<wbr/>tonemap.<wbr/>available<wbr/>Tone<wbr/>Map<wbr/>Modes</a></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When switching to an application-defined contrast curve by setting
 <a href="#controls_android.tonemap.mode">android.<wbr/>tonemap.<wbr/>mode</a> to CONTRAST_<wbr/>CURVE,<wbr/> the curve is defined
 per-channel with a set of <code>(in,<wbr/> out)</code> points that specify the
@@ -25475,7 +27708,7 @@ roughly the same.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25507,15 +27740,19 @@ GAMMA_<wbr/>VALUE</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The tonemap curve will be defined by the following formula:
 * OUT = pow(IN,<wbr/> 1.<wbr/>0 /<wbr/> gamma)
 where IN and OUT is the input pixel value scaled to range [0.<wbr/>0,<wbr/> 1.<wbr/>0],<wbr/>
@@ -25530,7 +27767,7 @@ within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -25550,11 +27787,11 @@ within [1.<wbr/>0,<wbr/> 5.<wbr/>0] are guaranteed not to be clipped.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">SRGB</span>
+                    <span class="entry_type_enum_name">SRGB (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by sRGB</p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">REC709</span>
+                    <span class="entry_type_enum_name">REC709 (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>Tonemapping curve is defined by ITU-R BT.<wbr/>709</p></span>
                   </li>
                 </ul>
@@ -25572,15 +27809,19 @@ PRESET_<wbr/>CURVE</p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
              <p>The tonemap curve will be defined by the specified standard.<wbr/></p>
 <p>sRGB (approximated by 16 control points):</p>
 <p><img alt="sRGB tonemapping curve" src="images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png"/></p>
@@ -25592,7 +27833,7 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -25601,10 +27842,10 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_led" class="section">led</td></tr>
+  <tr><td colspan="7" id="section_led" class="section">led</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -25613,6 +27854,7 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -25644,10 +27886,10 @@ curves.<wbr/> Camera devices may apply a different approximation to the curve.<w
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -25672,20 +27914,24 @@ doesn't violate the above rules.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -25694,6 +27940,7 @@ doesn't violate the above rules.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -25725,10 +27972,10 @@ doesn't violate the above rules.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -25753,20 +28000,24 @@ doesn't violate the above rules.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -25775,6 +28026,7 @@ doesn't violate the above rules.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -25810,7 +28062,7 @@ doesn't violate the above rules.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">TRANSMIT</span>
+                    <span class="entry_type_enum_name">TRANSMIT (v3.2)</span>
                     <span class="entry_type_enum_notes"><p><a href="#controls_android.led.transmit">android.<wbr/>led.<wbr/>transmit</a> control is used.<wbr/></p></span>
                   </li>
                 </ul>
@@ -25827,13 +28079,17 @@ doesn't violate the above rules.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -25842,10 +28098,10 @@ doesn't violate the above rules.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_info" class="section">info</td></tr>
+  <tr><td colspan="7" id="section_info" class="section">info</td></tr>
 
 
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -25854,6 +28110,7 @@ doesn't violate the above rules.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -25886,7 +28143,7 @@ doesn't violate the above rules.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">LIMITED</span>
+                    <span class="entry_type_enum_name">LIMITED (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
 better.<wbr/></p>
 <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
@@ -25906,7 +28163,7 @@ supported for <a href="#controls_android.control.aeMode">android.<wbr/>control.<
 can be checked for in <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a>.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">FULL</span>
+                    <span class="entry_type_enum_name">FULL (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device is capable of supporting advanced imaging applications.<wbr/></p>
 <p>The stream configurations listed in the <code>FULL</code>,<wbr/> <code>LEGACY</code> and <code>LIMITED</code> tables in the
 <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
@@ -25927,10 +28184,9 @@ Pre-API level 23,<wbr/> FULL devices also supported arbitrary cropping region
 23,<wbr/> and <code>FULL</code> devices may only support <code>CENTERED</code> cropping.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">LEGACY</span>
+                    <span class="entry_type_enum_name">LEGACY (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device is running in backward compatibility mode.<wbr/></p>
-<p>Only the stream configurations listed in the <code>LEGACY</code> table in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a>
-documentation are supported.<wbr/></p>
+<p>Only the stream configurations listed in the <code>LEGACY</code> table in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are supported.<wbr/></p>
 <p>A <code>LEGACY</code> device does not support per-frame control,<wbr/> manual sensor control,<wbr/> manual
 post-processing,<wbr/> arbitrary cropping regions,<wbr/> and has relaxed performance constraints.<wbr/>
 No additional capabilities beyond <code>BACKWARD_<wbr/>COMPATIBLE</code> will ever be listed by a
@@ -25943,12 +28199,11 @@ for the final capture,<wbr/> if a flash is available on the device and the AE mo
 enable the flash.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">3</span>
+                    <span class="entry_type_enum_name">3 (v3.2)</span>
                     <span class="entry_type_enum_notes"><p>This camera device is capable of YUV reprocessing and RAW data capture,<wbr/> in addition to
 FULL-level capabilities.<wbr/></p>
 <p>The stream configurations listed in the <code>LEVEL_<wbr/>3</code>,<wbr/> <code>RAW</code>,<wbr/> <code>FULL</code>,<wbr/> <code>LEGACY</code> and
-<code>LIMITED</code> tables in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a>
-documentation are guaranteed to be supported.<wbr/></p>
+<code>LIMITED</code> tables in the <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">createCaptureSession</a> documentation are guaranteed to be supported.<wbr/></p>
 <p>The following additional capabilities are guaranteed to be supported:</p>
 <ul>
 <li><code>YUV_<wbr/>REPROCESSING</code> capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains
@@ -25957,6 +28212,27 @@ documentation are guaranteed to be supported.<wbr/></p>
   <code>RAW</code>)</li>
 </ul></span>
                   </li>
+                  <li>
+                    <span class="entry_type_enum_name">EXTERNAL (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>This camera device is backed by an external camera connected to this Android device.<wbr/></p>
+<p>The device has capability identical to a LIMITED level device,<wbr/> with the following
+exceptions:</p>
+<ul>
+<li>The device may not report lens/<wbr/>sensor related information such as<ul>
+<li><a href="#controls_android.lens.focalLength">android.<wbr/>lens.<wbr/>focal<wbr/>Length</a></li>
+<li><a href="#static_android.lens.info.hyperfocalDistance">android.<wbr/>lens.<wbr/>info.<wbr/>hyperfocal<wbr/>Distance</a></li>
+<li><a href="#static_android.sensor.info.physicalSize">android.<wbr/>sensor.<wbr/>info.<wbr/>physical<wbr/>Size</a></li>
+<li><a href="#static_android.sensor.info.whiteLevel">android.<wbr/>sensor.<wbr/>info.<wbr/>white<wbr/>Level</a></li>
+<li><a href="#static_android.sensor.blackLevelPattern">android.<wbr/>sensor.<wbr/>black<wbr/>Level<wbr/>Pattern</a></li>
+<li><a href="#static_android.sensor.info.colorFilterArrangement">android.<wbr/>sensor.<wbr/>info.<wbr/>color<wbr/>Filter<wbr/>Arrangement</a></li>
+<li><a href="#dynamic_android.sensor.rollingShutterSkew">android.<wbr/>sensor.<wbr/>rolling<wbr/>Shutter<wbr/>Skew</a></li>
+</ul>
+</li>
+<li>The device will report 0 for <a href="#static_android.sensor.orientation">android.<wbr/>sensor.<wbr/>orientation</a></li>
+<li>The device has less guarantee on stable framerate,<wbr/> as the framerate partly depends
+  on the external camera being used.<wbr/></li>
+</ul></span>
+                  </li>
                 </ul>
 
             </td> <!-- entry_type -->
@@ -25971,15 +28247,19 @@ documentation are guaranteed to be supported.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The supported hardware level is a high-level description of the camera device's
 capabilities,<wbr/> summarizing several capabilities into one field.<wbr/>  Each level adds additional
 features to the previous one,<wbr/> and is always a strict superset of the previous level.<wbr/>
@@ -26012,7 +28292,7 @@ boolean isHardwareLevelSupported(CameraCharacteristics c,<wbr/> int requiredLeve
 <p>See the individual level enums for full descriptions of the supported capabilities.<wbr/>  The
 <a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> entry describes the device's capabilities at a
 finer-grain level,<wbr/> if needed.<wbr/> In addition,<wbr/> many controls have their available settings or
-ranges defined in individual <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a> entries.<wbr/></p>
+ranges defined in individual entries from <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.<wbr/></p>
 <p>Some features are not part of any particular hardware level or capability and must be
 queried separately.<wbr/> These include:</p>
 <ul>
@@ -26027,11 +28307,11 @@ queried separately.<wbr/> These include:</p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
-              <p>The camera 3 HAL device can implement one of three possible operational modes; LIMITED,<wbr/>
+            <td class="entry_details" colspan="6">
+              <p>A camera HALv3 device can implement one of three possible operational modes; LIMITED,<wbr/>
 FULL,<wbr/> and LEVEL_<wbr/>3.<wbr/></p>
 <p>FULL support or better is expected from new higher-end devices.<wbr/> Limited
 mode has hardware requirements roughly in line with those for a camera HAL device v1
@@ -26039,12 +28319,77 @@ implementation,<wbr/> and is expected from older or inexpensive devices.<wbr/> E
 superset of the previous level,<wbr/> and they share the same essential operational flow.<wbr/></p>
 <p>For full details refer to "S3.<wbr/> Operational Modes" in camera3.<wbr/>h</p>
 <p>Camera HAL3+ must not implement LEGACY mode.<wbr/> It is there for backwards compatibility in
-the <code>android.<wbr/>hardware.<wbr/>camera2</code> user-facing API only on HALv1 devices,<wbr/> and is implemented
-by the camera framework code.<wbr/></p>
+the <code>android.<wbr/>hardware.<wbr/>camera2</code> user-facing API only on legacy HALv1 devices,<wbr/> and is
+implemented by the camera framework code.<wbr/></p>
+<p>EXTERNAL level devices have a lower performance bar in CTS since the performance might depend
+on the external camera being used and is not fully controlled by the device manufacturer.<wbr/>
+The ITS test suite is exempted for the same reason.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.info.version">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>info.<wbr/>version
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+
+              <span class="entry_type_visibility"> [public as string]</span>
+
+
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>A short string for manufacturer version information about the camera device,<wbr/> such as
+ISP hardware,<wbr/> sensors,<wbr/> etc.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>This can be used in <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_IMAGE_DESCRIPTION">TAG_<wbr/>IMAGE_<wbr/>DESCRIPTION</a>
+in jpeg EXIF.<wbr/> This key may be absent if no version information is available on the
+device.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The string must consist of only alphanumeric characters,<wbr/> punctuation,<wbr/> and
+whitespace,<wbr/> i.<wbr/>e.<wbr/> it must match regular expression "[\p{Alnum}\p{Punct}\p{Space}]*".<wbr/>
+It must not exceed 256 characters.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -26053,10 +28398,10 @@ by the camera framework code.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_blackLevel" class="section">blackLevel</td></tr>
+  <tr><td colspan="7" id="section_blackLevel" class="section">blackLevel</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26065,6 +28410,7 @@ by the camera framework code.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26097,10 +28443,10 @@ by the camera framework code.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -26117,6 +28463,10 @@ to its current values,<wbr/> or is free to vary.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -26125,10 +28475,10 @@ to its current values,<wbr/> or is free to vary.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When set to <code>true</code> (ON),<wbr/> the values used for black-level
 compensation will not change until the lock is set to
 <code>false</code> (OFF).<wbr/></p>
@@ -26168,10 +28518,10 @@ or reset.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If for some reason black level locking is no longer possible
 (for example,<wbr/> the analog gain has changed,<wbr/> which forces
 black level offsets to be recalculated),<wbr/> then the HAL must
@@ -26181,14 +28531,14 @@ possible again.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26197,6 +28547,7 @@ possible again.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26229,10 +28580,10 @@ possible again.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OFF</span>
+                    <span class="entry_type_enum_name">OFF (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">ON</span>
+                    <span class="entry_type_enum_name">ON (v3.2)</span>
                   </li>
                 </ul>
 
@@ -26249,6 +28600,10 @@ to its current values,<wbr/> or is free to vary.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_HAL2">HAL2</a></li>
@@ -26257,10 +28612,10 @@ to its current values,<wbr/> or is free to vary.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Whether the black level offset was locked for this frame.<wbr/>  Should be
 ON if <a href="#controls_android.blackLevel.lock">android.<wbr/>black<wbr/>Level.<wbr/>lock</a> was ON in the capture request,<wbr/> unless
 a change in other capture settings forced the camera device to
@@ -26269,10 +28624,10 @@ perform a black level reset.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If for some reason black level locking is no longer possible
 (for example,<wbr/> the analog gain has changed,<wbr/> which forces
 black level offsets to be recalculated),<wbr/> then the HAL must
@@ -26282,7 +28637,7 @@ possible again.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -26291,10 +28646,10 @@ possible again.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_sync" class="section">sync</td></tr>
+  <tr><td colspan="7" id="section_sync" class="section">sync</td></tr>
 
 
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26303,6 +28658,7 @@ possible again.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26335,7 +28691,7 @@ possible again.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">CONVERGING</span>
+                    <span class="entry_type_enum_name">CONVERGING (v3.2)</span>
                     <span class="entry_type_enum_value">-1</span>
                     <span class="entry_type_enum_notes"><p>The current result is not yet fully synchronized to any request.<wbr/></p>
 <p>Synchronization is in progress,<wbr/> and reading metadata from this
@@ -26347,7 +28703,7 @@ the result is guaranteed to be synchronized to (as long as the
 request settings remain constant).<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_name">UNKNOWN (v3.2)</span>
                     <span class="entry_type_enum_value">-2</span>
                     <span class="entry_type_enum_notes"><p>The current result's synchronization status is unknown.<wbr/></p>
 <p>The result may have already converged,<wbr/> or it may be in
@@ -26379,6 +28735,10 @@ synchronized.<wbr/></p>
 <code>frame_<wbr/>number</code>,<wbr/> or one of the two enums (CONVERGING /<wbr/> UNKNOWN).<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -26387,10 +28747,10 @@ synchronized.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>When a request is submitted to the camera device,<wbr/> there is usually a
 delay of several frames before the controls get applied.<wbr/> A camera
 device may either choose to account for this delay by implementing a
@@ -26443,10 +28803,10 @@ of all the results corresponding to currently in-flight requests.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>Using UNKNOWN here is illegal unless <a href="#static_android.sync.maxLatency">android.<wbr/>sync.<wbr/>max<wbr/>Latency</a>
 is also UNKNOWN.<wbr/></p>
 <p>FULL capability devices should simply set this value to the
@@ -26454,14 +28814,14 @@ is also UNKNOWN.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26470,6 +28830,7 @@ is also UNKNOWN.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26502,7 +28863,7 @@ is also UNKNOWN.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">PER_FRAME_CONTROL</span>
+                    <span class="entry_type_enum_name">PER_FRAME_CONTROL (v3.2)</span>
                     <span class="entry_type_enum_value">0</span>
                     <span class="entry_type_enum_notes"><p>Every frame has the requests immediately applied.<wbr/></p>
 <p>Changing controls over multiple requests one after another will
@@ -26511,7 +28872,7 @@ each frame.<wbr/></p>
 <p>All FULL capability devices will have this as their maxLatency.<wbr/></p></span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">UNKNOWN</span>
+                    <span class="entry_type_enum_name">UNKNOWN (v3.2)</span>
                     <span class="entry_type_enum_value">-1</span>
                     <span class="entry_type_enum_notes"><p>Each new frame has some subset (potentially the entire set)
 of the past requests applied to the camera settings.<wbr/></p>
@@ -26538,6 +28899,10 @@ result's state becomes synchronized.<wbr/></p>
               <p>A positive value,<wbr/> PER_<wbr/>FRAME_<wbr/>CONTROL,<wbr/> or UNKNOWN.<wbr/></p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_V1">V1</a></li>
@@ -26546,10 +28911,10 @@ result's state becomes synchronized.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This defines the maximum distance (in number of metadata results),<wbr/>
 between the frame number of the request that has new controls to apply
 and the frame number of the result that has all the controls applied.<wbr/></p>
@@ -26560,10 +28925,10 @@ submitted camera settings have been applied in outgoing frames.<wbr/></p>
           </tr>
 
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>For example if maxLatency was 2,<wbr/></p>
 <pre><code>initial request = X (repeating)
 request1 = X
@@ -26594,7 +28959,7 @@ to know when sensor settings have been applied.<wbr/></p>
             </td>
           </tr>
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -26603,10 +28968,10 @@ to know when sensor settings have been applied.<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_reprocess" class="section">reprocess</td></tr>
+  <tr><td colspan="7" id="section_reprocess" class="section">reprocess</td></tr>
 
 
-      <tr><td colspan="6" class="kind">controls</td></tr>
+      <tr><td colspan="7" class="kind">controls</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26615,6 +28980,7 @@ to know when sensor settings have been applied.<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26661,6 +29027,10 @@ frame by the application processing before sending for reprocessing.<wbr/></p>
               <p>&gt;= 1.<wbr/>0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -26669,10 +29039,10 @@ frame by the application processing before sending for reprocessing.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
 capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
 <p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
@@ -26707,14 +29077,14 @@ Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective w
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">dynamic</td></tr>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26723,6 +29093,7 @@ Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective w
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26769,6 +29140,10 @@ frame by the application processing before sending for reprocessing.<wbr/></p>
               <p>&gt;= 1.<wbr/>0</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -26777,10 +29152,10 @@ frame by the application processing before sending for reprocessing.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This is optional,<wbr/> and will be supported if the camera device supports YUV_<wbr/>REPROCESSING
 capability (<a href="#static_android.request.availableCapabilities">android.<wbr/>request.<wbr/>available<wbr/>Capabilities</a> contains YUV_<wbr/>REPROCESSING).<wbr/></p>
 <p>For some YUV reprocessing use cases,<wbr/> the application may choose to filter the original
@@ -26815,14 +29190,14 @@ Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective w
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
 
       <!-- end of kind -->
       </tbody>
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26831,6 +29206,7 @@ Similarly,<wbr/> for edge enhancement reprocessing,<wbr/> it is only effective w
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26877,6 +29253,10 @@ reprocess capture request.<wbr/></p>
               <p>&lt;= 4</p>
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_REPROC">REPROC</a></li>
@@ -26885,10 +29265,10 @@ reprocess capture request.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>The key describes the maximal interference that one reprocess (input) request
 can introduce to the camera simultaneous streaming of regular (output) capture
 requests,<wbr/> including repeating requests.<wbr/></p>
@@ -26910,7 +29290,7 @@ YUV_<wbr/>REPROCESSING).<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -26919,10 +29299,10 @@ YUV_<wbr/>REPROCESSING).<wbr/></p>
       </tbody>
 
   <!-- end of section -->
-  <tr><td colspan="6" id="section_depth" class="section">depth</td></tr>
+  <tr><td colspan="7" id="section_depth" class="section">depth</td></tr>
 
 
-      <tr><td colspan="6" class="kind">static</td></tr>
+      <tr><td colspan="7" class="kind">static</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -26931,6 +29311,7 @@ YUV_<wbr/>REPROCESSING).<wbr/></p>
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -26974,6 +29355,10 @@ YUV_<wbr/>REPROCESSING).<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -26982,10 +29367,10 @@ YUV_<wbr/>REPROCESSING).<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If a camera device supports outputting depth range data in the form of a depth point
 cloud (<a href="https://developer.android.com/reference/android/graphics/ImageFormat.html#DEPTH_POINT_CLOUD">Image<wbr/>Format#DEPTH_<wbr/>POINT_<wbr/>CLOUD</a>),<wbr/> this is the maximum
 number of points an output buffer may contain.<wbr/></p>
@@ -26996,7 +29381,7 @@ not be defined.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -27021,10 +29406,10 @@ not be defined.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">OUTPUT</span>
+                    <span class="entry_type_enum_name">OUTPUT (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">INPUT</span>
+                    <span class="entry_type_enum_name">INPUT (v3.2)</span>
                   </li>
                 </ul>
 
@@ -27042,6 +29427,10 @@ configurations that this camera device supports
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -27050,10 +29439,10 @@ configurations that this camera device supports
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>These are output stream configurations for use with
 dataSpace HAL_<wbr/>DATASPACE_<wbr/>DEPTH.<wbr/> The configurations are
 listed as <code>(format,<wbr/> width,<wbr/> height,<wbr/> input?)</code> tuples.<wbr/></p>
@@ -27069,7 +29458,7 @@ the entries for HAL_<wbr/>PIXEL_<wbr/>FORMAT_<wbr/>Y16.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -27107,6 +29496,10 @@ format/<wbr/>size combination for depth output formats.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -27115,10 +29508,10 @@ format/<wbr/>size combination for depth output formats.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>This should correspond to the frame duration when only that
 stream is active,<wbr/> with all processing (typically in android.<wbr/>*.<wbr/>mode)
 set to either OFF or FAST.<wbr/></p>
@@ -27129,12 +29522,11 @@ is the same regardless of whether the stream is input or output.<wbr/></p>
 <p>See <a href="#controls_android.sensor.frameDuration">android.<wbr/>sensor.<wbr/>frame<wbr/>Duration</a> and
 <a href="#static_android.scaler.availableStallDurations">android.<wbr/>scaler.<wbr/>available<wbr/>Stall<wbr/>Durations</a> for more details about
 calculating the max frame rate.<wbr/></p>
-<p>(Keep in sync with <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html#getOutputMinFrameDuration">StreamConfigurationMap#getOutputMinFrameDuration</a>)</p>
             </td>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -27172,6 +29564,10 @@ output format/<wbr/>size combination for depth streams.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
               <ul class="entry_tags">
                   <li><a href="#tag_DEPTH">DEPTH</a></li>
@@ -27180,10 +29576,10 @@ output format/<wbr/>size combination for depth streams.<wbr/></p>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>A stall duration is how much extra time would get added
 to the normal minimum frame duration for a repeating request
 that has streams with non-zero stall.<wbr/></p>
@@ -27196,7 +29592,7 @@ duration.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
                 
@@ -27217,10 +29613,10 @@ duration.<wbr/></p>
 
                 <ul class="entry_type_enum">
                   <li>
-                    <span class="entry_type_enum_name">FALSE</span>
+                    <span class="entry_type_enum_name">FALSE (v3.2)</span>
                   </li>
                   <li>
-                    <span class="entry_type_enum_name">TRUE</span>
+                    <span class="entry_type_enum_name">TRUE (v3.2)</span>
                   </li>
                 </ul>
 
@@ -27238,15 +29634,19 @@ YUV_<wbr/>420_<wbr/>888,<wbr/> JPEG,<wbr/> or RAW) simultaneously.<wbr/></p>
             <td class="entry_range">
             </td>
 
+            <td class="entry_hal_version">
+              <p>3.<wbr/>2</p>
+            </td>
+
             <td class="entry_tags">
             </td>
 
           </tr>
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               <p>If TRUE,<wbr/> including both depth and color outputs in a single
 capture request is not supported.<wbr/> An application must interleave color
 and depth requests.<wbr/>  If FALSE,<wbr/> a single request can target both types
@@ -27259,7 +29659,510 @@ corrupted during depth measurement.<wbr/></p>
           </tr>
 
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="7" id="section_logicalMultiCamera" class="section">logicalMultiCamera</td></tr>
+
+
+      <tr><td colspan="7" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.logicalMultiCamera.physicalIds">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>logical<wbr/>Multi<wbr/>Camera.<wbr/>physical<wbr/>Ids
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [hidden]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>String containing the ids of the underlying physical cameras.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+              UTF-8 null-terminated string
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_LOGICALCAMERA">LOGICALCAMERA</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>For a logical camera,<wbr/> this is concatenation of all underlying physical camera ids.<wbr/>
+The null terminator for physical camera id must be preserved so that the whole string
+can be tokenized using '\0' to generate list of physical camera ids.<wbr/></p>
+<p>For example,<wbr/> if the physical camera ids of the logical camera are "2" and "3",<wbr/> the
+value of this tag will be ['2',<wbr/> '\0',<wbr/> '3',<wbr/> '\0'].<wbr/></p>
+<p>The number of physical camera ids must be no less than 2.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+                
+          <tr class="entry" id="static_android.logicalMultiCamera.sensorSyncType">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>logical<wbr/>Multi<wbr/>Camera.<wbr/>sensor<wbr/>Sync<wbr/>Type
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+              <span class="entry_type_hwlevel">[limited] </span>
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">APPROXIMATE (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>A software mechanism is used to synchronize between the physical cameras.<wbr/> As a result,<wbr/>
+the timestamp of an image from a physical stream is only an approximation of the
+image sensor start-of-exposure time.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">CALIBRATED (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>The camera device supports frame timestamp synchronization at the hardware level,<wbr/>
+and the timestamp of a physical stream image accurately reflects its
+start-of-exposure time.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>The accuracy of frame timestamp synchronization between physical cameras</p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_LOGICALCAMERA">LOGICALCAMERA</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The accuracy of the frame timestamp synchronization determines the physical cameras'
+ability to start exposure at the same time.<wbr/> If the sensorSyncType is CALIBRATED,<wbr/>
+the physical camera sensors usually run in master-slave mode so that their shutter
+time is synchronized.<wbr/> For APPROXIMATE sensorSyncType,<wbr/> the camera sensors usually run in
+master-master mode,<wbr/> and there could be offset between their start of exposure.<wbr/></p>
+<p>In both cases,<wbr/> all images generated for a particular capture request still carry the same
+timestamps,<wbr/> so that they can be used to look up the matching frame number and
+onCaptureStarted callback.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+
+  <!-- end of section -->
+  <tr><td colspan="7" id="section_distortionCorrection" class="section">distortionCorrection</td></tr>
+
+
+      <tr><td colspan="7" class="kind">controls</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="controls_android.distortionCorrection.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>distortion<wbr/>Correction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>No distortion correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Lens distortion correction is applied without reducing frame rate
+relative to sensor output.<wbr/> It may be the same as OFF if distortion correction would
+reduce frame rate relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>High-quality distortion correction is applied,<wbr/> at the cost of
+possibly reduced frame rate relative to sensor output.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the lens distortion correction block.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.distortionCorrection.availableModes">android.<wbr/>distortion<wbr/>Correction.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The lens distortion correction block attempts to improve image quality by fixing
+radial,<wbr/> tangential,<wbr/> or other geometric aberrations in the camera device's optics.<wbr/>  If
+available,<wbr/> the <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> field documents the lens's distortion parameters.<wbr/></p>
+<p>OFF means no distortion correction is done.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined distortion correction will be
+applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will use the highest-quality
+correction algorithms,<wbr/> even if it slows down capture rate.<wbr/> FAST means the camera device
+will not slow down capture rate when applying correction.<wbr/> FAST may be the same as OFF if
+any correction at all would slow down capture rate.<wbr/>  Every output stream will have a
+similar amount of enhancement applied.<wbr/></p>
+<p>The correction only applies to processed outputs such as YUV,<wbr/> JPEG,<wbr/> or DEPTH16; it is not
+applied to any RAW output.<wbr/>  Metadata coordinates such as face rectangles or metering
+regions are also not affected by correction.<wbr/></p>
+<p>Applications enabling distortion correction need to pay extra attention when converting
+image coordinates between corrected output buffers and the sensor array.<wbr/> For example,<wbr/> if
+the app supports tap-to-focus and enables correction,<wbr/> it then has to apply the distortion
+model described in <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> to the image buffer tap coordinates to properly
+calculate the tap position on the sensor active array to be used with
+<a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/> The same applies in reverse to detected face rectangles if
+they need to be drawn on top of the corrected output buffers.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="7" class="kind">static</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="static_android.distortionCorrection.availableModes">
+            <td class="entry_name
+             " rowspan="5">
+              android.<wbr/>distortion<wbr/>Correction.<wbr/>available<wbr/>Modes
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name">byte</span>
+                <span class="entry_type_container">x</span>
+
+                <span class="entry_type_array">
+                  n
+                </span>
+              <span class="entry_type_visibility"> [public as enumList]</span>
+
+
+
+
+                <div class="entry_type_notes">list of enums</div>
+
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>List of distortion correction modes for <a href="#controls_android.distortionCorrection.mode">android.<wbr/>distortion<wbr/>Correction.<wbr/>mode</a> that are
+supported by this camera device.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p>Any value listed in <a href="#controls_android.distortionCorrection.mode">android.<wbr/>distortion<wbr/>Correction.<wbr/>mode</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+              <ul class="entry_tags">
+                  <li><a href="#tag_V1">V1</a></li>
+                  <li><a href="#tag_REPROC">REPROC</a></li>
+              </ul>
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>No device is required to support this API; such devices will always list only 'OFF'.<wbr/>
+All devices that support this API will list both FAST and HIGH_<wbr/>QUALITY.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>HAL must support both FAST and HIGH_<wbr/>QUALITY if distortion correction is available
+on the camera device,<wbr/> but the underlying implementation can be the same for both modes.<wbr/>
+That is,<wbr/> if the highest quality implementation on the camera device does not slow down
+capture rate,<wbr/> then FAST and HIGH_<wbr/>QUALITY will generate the same output.<wbr/></p>
+            </td>
+          </tr>
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
+           <!-- end of entry -->
+        
+        
+
+      <!-- end of kind -->
+      </tbody>
+      <tr><td colspan="7" class="kind">dynamic</td></tr>
+
+      <thead class="entries_header">
+        <tr>
+          <th class="th_name">Property Name</th>
+          <th class="th_type">Type</th>
+          <th class="th_description">Description</th>
+          <th class="th_units">Units</th>
+          <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
+          <th class="th_tags">Tags</th>
+        </tr>
+      </thead>
+
+      <tbody>
+
+        
+
+        
+
+        
+
+        
+
+                
+          <tr class="entry" id="dynamic_android.distortionCorrection.mode">
+            <td class="entry_name
+             " rowspan="3">
+              android.<wbr/>distortion<wbr/>Correction.<wbr/>mode
+            </td>
+            <td class="entry_type">
+                <span class="entry_type_name entry_type_name_enum">byte</span>
+
+              <span class="entry_type_visibility"> [public]</span>
+
+
+
+
+
+                <ul class="entry_type_enum">
+                  <li>
+                    <span class="entry_type_enum_name">OFF (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>No distortion correction is applied.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">FAST (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>Lens distortion correction is applied without reducing frame rate
+relative to sensor output.<wbr/> It may be the same as OFF if distortion correction would
+reduce frame rate relative to sensor.<wbr/></p></span>
+                  </li>
+                  <li>
+                    <span class="entry_type_enum_name">HIGH_QUALITY (v3.3)</span>
+                    <span class="entry_type_enum_notes"><p>High-quality distortion correction is applied,<wbr/> at the cost of
+possibly reduced frame rate relative to sensor output.<wbr/></p></span>
+                  </li>
+                </ul>
+
+            </td> <!-- entry_type -->
+
+            <td class="entry_description">
+              <p>Mode of operation for the lens distortion correction block.<wbr/></p>
+            </td>
+
+            <td class="entry_units">
+            </td>
+
+            <td class="entry_range">
+              <p><a href="#static_android.distortionCorrection.availableModes">android.<wbr/>distortion<wbr/>Correction.<wbr/>available<wbr/>Modes</a></p>
+            </td>
+
+            <td class="entry_hal_version">
+              <p>3.<wbr/>3</p>
+            </td>
+
+            <td class="entry_tags">
+            </td>
+
+          </tr>
+          <tr class="entries_header">
+            <th class="th_details" colspan="6">Details</th>
+          </tr>
+          <tr class="entry_cont">
+            <td class="entry_details" colspan="6">
+              <p>The lens distortion correction block attempts to improve image quality by fixing
+radial,<wbr/> tangential,<wbr/> or other geometric aberrations in the camera device's optics.<wbr/>  If
+available,<wbr/> the <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> field documents the lens's distortion parameters.<wbr/></p>
+<p>OFF means no distortion correction is done.<wbr/></p>
+<p>FAST/<wbr/>HIGH_<wbr/>QUALITY both mean camera device determined distortion correction will be
+applied.<wbr/> HIGH_<wbr/>QUALITY mode indicates that the camera device will use the highest-quality
+correction algorithms,<wbr/> even if it slows down capture rate.<wbr/> FAST means the camera device
+will not slow down capture rate when applying correction.<wbr/> FAST may be the same as OFF if
+any correction at all would slow down capture rate.<wbr/>  Every output stream will have a
+similar amount of enhancement applied.<wbr/></p>
+<p>The correction only applies to processed outputs such as YUV,<wbr/> JPEG,<wbr/> or DEPTH16; it is not
+applied to any RAW output.<wbr/>  Metadata coordinates such as face rectangles or metering
+regions are also not affected by correction.<wbr/></p>
+<p>Applications enabling distortion correction need to pay extra attention when converting
+image coordinates between corrected output buffers and the sensor array.<wbr/> For example,<wbr/> if
+the app supports tap-to-focus and enables correction,<wbr/> it then has to apply the distortion
+model described in <a href="#static_android.lens.distortion">android.<wbr/>lens.<wbr/>distortion</a> to the image buffer tap coordinates to properly
+calculate the tap position on the sensor active array to be used with
+<a href="#controls_android.control.afRegions">android.<wbr/>control.<wbr/>af<wbr/>Regions</a>.<wbr/> The same applies in reverse to detected face rectangles if
+they need to be drawn on top of the corrected output buffers.<wbr/></p>
+            </td>
+          </tr>
+
+
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         
         
@@ -27390,6 +30293,7 @@ corrupted during depth measurement.<wbr/></p>
           <li><a href="#dynamic_android.statistics.hotPixelMap">android.statistics.hotPixelMap</a> (dynamic)</li>
           <li><a href="#dynamic_android.sync.frameNumber">android.sync.frameNumber</a> (dynamic)</li>
           <li><a href="#static_android.sync.maxLatency">android.sync.maxLatency</a> (static)</li>
+          <li><a href="#static_android.distortionCorrection.availableModes">android.distortionCorrection.availableModes</a> (static)</li>
           <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
           <li><a href="#dynamic_android.hotPixel.mode">android.hotPixel.mode</a> (dynamic)</li>
           <li><a href="#dynamic_android.lens.aperture">android.lens.aperture</a> (dynamic)</li>
@@ -27434,7 +30338,7 @@ corrupted during depth measurement.<wbr/></p>
         </ul>
       </li> <!-- tag_RAW -->
       <li id="tag_HAL2">HAL2 - 
-        Entry is only used by camera device HAL 2.x
+        Entry is only used by camera device legacy HAL 2.x
     
         <ul class="tags_entries">
           <li><a href="#controls_android.request.inputStreams">android.request.inputStreams</a> (controls)</li>
@@ -27459,6 +30363,7 @@ corrupted during depth measurement.<wbr/></p>
           <li><a href="#static_android.lens.poseTranslation">android.lens.poseTranslation</a> (static)</li>
           <li><a href="#static_android.lens.intrinsicCalibration">android.lens.intrinsicCalibration</a> (static)</li>
           <li><a href="#static_android.lens.radialDistortion">android.lens.radialDistortion</a> (static)</li>
+          <li><a href="#static_android.lens.distortion">android.lens.distortion</a> (static)</li>
           <li><a href="#static_android.depth.maxDepthSamples">android.depth.maxDepthSamples</a> (static)</li>
           <li><a href="#static_android.depth.availableDepthStreamConfigurations">android.depth.availableDepthStreamConfigurations</a> (static)</li>
           <li><a href="#static_android.depth.availableDepthMinFrameDurations">android.depth.availableDepthMinFrameDurations</a> (static)</li>
@@ -27477,10 +30382,19 @@ corrupted during depth measurement.<wbr/></p>
           <li><a href="#static_android.scaler.availableInputOutputFormatsMap">android.scaler.availableInputOutputFormatsMap</a> (static)</li>
           <li><a href="#controls_android.reprocess.effectiveExposureFactor">android.reprocess.effectiveExposureFactor</a> (controls)</li>
           <li><a href="#static_android.reprocess.maxCaptureStall">android.reprocess.maxCaptureStall</a> (static)</li>
+          <li><a href="#static_android.distortionCorrection.availableModes">android.distortionCorrection.availableModes</a> (static)</li>
           <li><a href="#dynamic_android.edge.mode">android.edge.mode</a> (dynamic)</li>
           <li><a href="#dynamic_android.noiseReduction.mode">android.noiseReduction.mode</a> (dynamic)</li>
         </ul>
       </li> <!-- tag_REPROC -->
+      <li id="tag_LOGICALCAMERA">LOGICALCAMERA - 
+        Entry is required for logical multi-camera capability.
+    
+        <ul class="tags_entries">
+          <li><a href="#static_android.logicalMultiCamera.physicalIds">android.logicalMultiCamera.physicalIds</a> (static)</li>
+          <li><a href="#static_android.logicalMultiCamera.sensorSyncType">android.logicalMultiCamera.sensorSyncType</a> (static)</li>
+        </ul>
+      </li> <!-- tag_LOGICALCAMERA -->
       <li id="tag_FUTURE">FUTURE - 
         Entry is  under-specified and is not required for now. This is for book-keeping purpose,
         do not implement or use it, it may be revised for future.
index 113a38d..3ba191b 100644 (file)
     .th_units { width: 10% }
     .th_tags { width: 5% }
     .th_details { width: 25% }
-    .th_type { width: 20% }
+    .th_type { width: 17% }
     .th_description { width: 20% }
-    .th_range { width: 10% }
+    .th_range { width: 8% }
+    .th_hal_version { width: 5% }
     td { font-size: 0.9em; }
 
     /* hide the first thead, we need it there only to enforce column sizes */
 
   # Convert target "xxx.yyy#zzz" to a HTML reference to Android public developer
   # docs with link name from shortname.
-  def html_link(target, shortname):
+  def html_link(target, target_ndk, shortname):
     if shortname == '':
       lastdot = target.rfind('.')
       if lastdot == -1:
@@ -203,6 +204,7 @@ ${          insert_toc_body(kind)}\
         <th class="th_description">Description</th>
         <th class="th_units">Units</th>
         <th class="th_range">Range</th>
+        <th class="th_hal_version">HIDL HAL version</th>
         <th class="th_tags">Tags</th>
       </tr>
     </thead> <!-- so that the first occurrence of thead is not
@@ -210,14 +212,14 @@ ${          insert_toc_body(kind)}\
 % for root in metadata.outer_namespaces:
 <!-- <namespace name="${root.name}"> -->
   % for section in root.sections:
-  <tr><td colspan="6" id="section_${section.name}" class="section">${section.name}</td></tr>
+  <tr><td colspan="7" id="section_${section.name}" class="section">${section.name}</td></tr>
 
     % if section.description is not None:
       <tr class="description"><td>${section.description}</td></tr>
     % endif
 
     % for kind in section.merged_kinds: # dynamic,static,controls
-      <tr><td colspan="6" class="kind">${kind.name}</td></tr>
+      <tr><td colspan="7" class="kind">${kind.name}</td></tr>
 
       <thead class="entries_header">
         <tr>
@@ -226,6 +228,7 @@ ${          insert_toc_body(kind)}\
           <th class="th_description">Description</th>
           <th class="th_units">Units</th>
           <th class="th_range">Range</th>
+          <th class="th_hal_version">Initial HIDL HAL version</th>
           <th class="th_tags">Tags</th>
         </tr>
       </thead>
@@ -298,7 +301,7 @@ ${          insert_toc_body(kind)}\
                 <ul class="entry_type_enum">
                   % for value in prop.enum.values:
                   <li>
-                    <span class="entry_type_enum_name">${value.name}</span>
+                    <span class="entry_type_enum_name">${value.name} (v${value.hal_major_version}.${value.hal_minor_version})</span>
                   % if value.deprecated:
                     <span class="entry_type_enum_deprecated">[deprecated]</span>
                   % endif:
@@ -342,6 +345,10 @@ ${          insert_toc_body(kind)}\
             % endif
             </td>
 
+            <td class="entry_hal_version">
+              ${"%d.%d" % (prop.hal_major_version, prop.hal_minor_version) | md_html, linkify_tags(metadata), wbr}
+            </td>
+
             <td class="entry_tags">
             % if next(prop.tags, None):
               <ul class="entry_tags">
@@ -355,10 +362,10 @@ ${          insert_toc_body(kind)}\
           </tr>
           % if prop.details is not None:
           <tr class="entries_header">
-            <th class="th_details" colspan="5">Details</th>
+            <th class="th_details" colspan="6">Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               ${prop.details | md_html, linkify_tags(metadata), wbr}
             </td>
           </tr>
@@ -366,16 +373,16 @@ ${          insert_toc_body(kind)}\
 
           % if prop.hal_details is not None:
           <tr class="entries_header">
-            <th class="th_details" colspan="5">HAL Implementation Details</th>
+            <th class="th_details" colspan="6">HAL Implementation Details</th>
           </tr>
           <tr class="entry_cont">
-            <td class="entry_details" colspan="5">
+            <td class="entry_details" colspan="6">
               ${prop.hal_details | md_html, linkify_tags(metadata), wbr}
             </td>
           </tr>
           % endif
 
-          <tr class="entry_spacer"><td class="entry_spacer" colspan="6"></td></tr>
+          <tr class="entry_spacer"><td class="entry_spacer" colspan="7"></td></tr>
            <!-- end of entry -->
         </%def>
 
index 56f2e27..3ad7b1b 100755 (executable)
@@ -109,6 +109,7 @@ binary_check tidy tidy tidy
 binary_check python python27 python2.7
 python_check bs4 py27-beautifulsoup4 python-bs4
 python_check mako py27-mako python-mako
+python_check markdown py27-markdown python-markdown
 
 exit $retcode
 
index 4912421..6999da9 100755 (executable)
@@ -35,7 +35,8 @@ fi
 
 thisdir=$(cd "$(dirname "$0")"; pwd)
 fwkdir="$ANDROID_BUILD_TOP/frameworks/base/core/java/android/hardware/camera2/"
-fwkdir_html="$ANDROID_BUILD_TOP/frameworks/base/docs/html"
+fwkdir_html="$ANDROID_BUILD_TOP/frameworks/base/docs/html/reference"
+hidldir="$ANDROID_BUILD_TOP/hardware/interfaces/camera/metadata"
 ctsdir="$ANDROID_BUILD_TOP/cts/tests/camera/src/android/hardware/camera2/cts"
 outdir="$ANDROID_PRODUCT_OUT/obj/ETC/system-media-camera-docs_intermediates"
 ndk_header_dir="$ANDROID_BUILD_TOP/frameworks/av/camera/ndk/include/camera"
@@ -61,14 +62,10 @@ function gen_file_abs() {
     local in="$1"
     local out="$2"
     local intermediates="$3"
-    local ndk="$4"
-    local spec_file=$thisdir/metadata_properties.xml
+    local hal_version="${4:-3.2}"
+    local spec_file=$thisdir/metadata_definitions.xml
 
-    if [[ "$ndk" == "yes" ]]; then
-      spec_file=$thisdir/ndk_metadata_properties.xml
-    fi
-
-    python $thisdir/metadata_parser_xml.py $spec_file $in $out
+    python $thisdir/metadata_parser_xml.py $spec_file $in $out $hal_version
 
     local succ=$?
 
@@ -192,7 +189,7 @@ function copy_directory() {
 }
 
 $thisdir/metadata-check-dependencies || exit 1
-$thisdir/metadata-validate $thisdir/metadata_properties.xml || exit 1
+$thisdir/metadata-validate $thisdir/metadata_definitions.xml || exit 1
 $thisdir/metadata-parser-sanity-check || exit 1
 
 # Generate HTML properties documentation
@@ -202,8 +199,14 @@ gen_file html.mako docs.html || exit 1
 gen_file camera_metadata_tag_info.mako ../src/camera_metadata_tag_info.c || exit 1
 gen_file camera_metadata_tags.mako ../include/system/camera_metadata_tags.h || exit 1
 
+# Generate HIDL metadata modules - new versions need to be added here manually
+mkdir -p "${hidldir}/3.2"
+gen_file_abs HidlMetadata.mako "$hidldir/3.2/types.hal" yes 3.2 || exit 1
+mkdir -p "${hidldir}/3.3"
+gen_file_abs HidlMetadata.mako "$hidldir/3.3/types.hal" yes 3.3 || exit 1
+
 #Generate NDK header
-gen_file_abs ndk_camera_metadata_tags.mako "$ndk_header_dir/NdkCameraMetadataTags.h" yes yes || exit 1
+gen_file_abs ndk_camera_metadata_tags.mako "$ndk_header_dir/NdkCameraMetadataTags.h" yes || exit 1
 
 # Generate Java API definitions
 mkdir -p "${outdir}"
index 386960a..efc9ce6 100755 (executable)
@@ -47,8 +47,8 @@ function check_test
 
 check_test "$thisdir/metadata_model_test.py" || exit 1
 check_test "$thisdir/metadata_helpers_test.py" || exit 1
-python $thisdir/metadata_parser_xml.py $thisdir/metadata_properties.xml $thisdir/metadata_template.mako $tmp_out || exit 1
-tidy -indent -xml -quiet $thisdir/metadata_properties.xml > $tmp_tidy1
+python $thisdir/metadata_parser_xml.py $thisdir/metadata_definitions.xml $thisdir/metadata_template.mako $tmp_out || exit 1
+tidy -indent -xml -quiet $thisdir/metadata_definitions.xml > $tmp_tidy1
 tidy -indent -xml -quiet $tmp_out > $tmp_tidy2
 
 diff $tmp_tidy1 $tmp_tidy2
index a7755ad..78d6823 100755 (executable)
@@ -25,7 +25,7 @@ then
     exit
 fi
 
-schema=$thisdir/metadata_properties.xsd
+schema=$thisdir/metadata_definitions.xsd
 doc=$1
 
 xmllint --noout --schema $schema $doc || exit 1
similarity index 89%
rename from camera/docs/metadata_properties.xml
rename to camera/docs/metadata_definitions.xml
index 291afec..a224dd3 100644 (file)
@@ -28,7 +28,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         Needed for useful RAW image processing and DNG file support
     </tag>
     <tag id="HAL2">
-        Entry is only used by camera device HAL 2.x
+        Entry is only used by camera device legacy HAL 2.x
     </tag>
     <tag id="FULL">
         Entry is required for full hardware level devices, and optional for other hardware levels
@@ -39,6 +39,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
     <tag id="REPROC">
         Entry is required for the YUV or PRIVATE reprocessing capability.
     </tag>
+    <tag id="LOGICALCAMERA">
+        Entry is required for logical multi-camera capability.
+    </tag>
     <tag id="FUTURE">
         Entry is  under-specified and is not required for now. This is for book-keeping purpose,
         do not implement or use it, it may be revised for future.
@@ -124,6 +127,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
     <typedef name="reprocessFormatsMap">
       <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
     </typedef>
+    <typedef name="oisSample">
+      <language name="java">android.hardware.camera2.params.OisSample</language>
+    </typedef>
   </types>
 
   <namespace name="android">
@@ -627,6 +633,22 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
                 sequence.
               </notes>
             </value>
+            <value hal_version="3.3">ON_EXTERNAL_FLASH
+              <notes>
+                An external flash has been turned on.
+
+                It informs the camera device that an external flash has been turned on, and that
+                metering (and continuous focus if active) should be quickly recaculated to account
+                for the external flash. Otherwise, this mode acts like ON.
+
+                When the external flash is turned off, AE mode should be changed to one of the
+                other available AE modes.
+
+                If the camera device supports AE external flash mode, android.control.aeState must
+                be FLASH_REQUIRED after the camera device finishes AE scan and it's too dark without
+                flash.
+              </notes>
+            </value>
           </enum>
           <description>The desired mode for the camera device's
           auto-exposure routine.</description>
@@ -699,6 +721,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               metadata.  If the region is entirely outside the crop region, it will be ignored and
               not reported in the result metadata.
           </details>
+          <ndk_details>
+              The data representation is `int[5 * area_count]`.
+              Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
+              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+              ymax.
+          </ndk_details>
           <hal_details>
               The HAL level representation of MeteringRectangle[] is a
               int[5 * area_count].
@@ -968,7 +996,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               ignored.
 
               If all regions have 0 weight, then no specific metering area needs to be used by the
-              camera device.
+              camera device. The capture result will either be a zero weight region as well, or
+              the region selected by the camera device as the focus area of interest.
 
               If the metering region is outside the used android.scaler.cropRegion returned in
               capture result metadata, the camera device will ignore the sections outside the crop
@@ -976,6 +1005,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               metadata. If the region is entirely outside the crop region, it will be ignored and
               not reported in the result metadata.
           </details>
+          <ndk_details>
+              The data representation is `int[5 * area_count]`.
+              Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
+              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+              ymax.
+          </ndk_details>
           <hal_details>
               The HAL level representation of MeteringRectangle[] is a
               int[5 * area_count].
@@ -1279,6 +1314,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               metadata.  If the region is entirely outside the crop region, it will be ignored and
               not reported in the result metadata.
           </details>
+          <ndk_details>
+              The data representation is `int[5 * area_count]`.
+              Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`.
+              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
+              ymax.
+          </ndk_details>
           <hal_details>
               The HAL level representation of MeteringRectangle[] is a
               int[5 * area_count].
@@ -1331,6 +1372,14 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             For example, the application may wish to manually control
             android.sensor.exposureTime, android.sensor.sensitivity, etc.
             </notes></value>
+            <value hal_version="3.3">MOTION_TRACKING
+            <notes>This request is for a motion tracking use case, where
+            the application will use camera and inertial sensor data to
+            locate and track objects in the world.
+
+            The camera device auto-exposure routine will limit the exposure time
+            of the camera to no more than 20 milliseconds, to minimize motion blur.
+            </notes></value>
           </enum>
           <description>Information to the camera device 3A (auto-exposure,
           auto-focus, auto-white balance) routines about the purpose
@@ -1339,10 +1388,13 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <details>This control (except for MANUAL) is only effective if
           `android.control.mode != OFF` and any 3A routine is active.
 
-          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
-          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
-          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
-          always supported.
+          All intents are supported by all devices, except that:
+            * ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities contains
+          PRIVATE_REPROCESSING or YUV_REPROCESSING.
+            * MANUAL will be supported if android.request.availableCapabilities contains
+          MANUAL_SENSOR.
+            * MOTION_TRACKING will be supported if android.request.availableCapabilities contains
+          MOTION_TRACKING.
           </details>
           <tag id="BC" />
         </entry>
@@ -1477,10 +1529,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           android.control.* are in effect, such as android.control.afMode.
 
           When set to USE_SCENE_MODE, the individual controls in
-          android.control.* are mostly disabled, and the camera device implements
-          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
-          as it wishes. The camera device scene mode 3A settings are provided by
-          {@link android.hardware.camera2.CaptureResult capture results}.
+          android.control.* are mostly disabled, and the camera device
+          implements one of the scene mode settings (such as ACTION,
+          SUNSET, or PARTY) as it wishes. The camera device scene mode
+          3A settings are provided by {@link
+          android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result
+          capture results}.
 
           When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
           is that this frame will not be used by camera device background 3A statistics
@@ -1599,7 +1653,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               barcode value.
               </notes>
             </value>
-            <value deprecated="true" optional="true">HIGH_SPEED_VIDEO
+            <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
               <notes>
               This is deprecated, please use {@link
               android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
@@ -1787,13 +1841,13 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           android.control.awbMode, and android.control.afMode in
           android.control.sceneModeOverrides.
 
-          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
-          the HAL must list supported video size and fps range in
-          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
-          1280x720, if the HAL has two different sensor configurations for normal streaming
-          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
-          requests, the HAL may have to switch between different sensor modes.
-          This mode is deprecated in HAL3.3, to support high speed video recording, please implement
+          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the
+          HAL must list supported video size and fps range in
+          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.  1280x720,
+          if the HAL has two different sensor configurations for normal streaming mode and high
+          speed streaming, when this scene mode is set/reset in a sequence of capture requests, the
+          HAL may have to switch between different sensor modes.  This mode is deprecated in legacy
+          HAL3.3; to support high speed video recording, please implement
           android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
           capbility defined in android.request.availableCapabilities.
           </hal_details>
@@ -2214,6 +2268,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
           <description>The ID sent with the latest
           CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
+          <deprecation_description>
+            Removed in camera HAL v3
+          </deprecation_description>
           <details>Must be 0 if no
           CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
           by HAL. Always updated even if AE algorithm ignores the
@@ -2292,7 +2349,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           :------------:|:----------------:|:---------:|:-----------------------:
           INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
 
-          When android.control.aeMode is AE_MODE_ON_*:
+          When android.control.aeMode is AE_MODE_ON*:
 
             State        | Transition Cause                             | New State      | Notes
           :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
@@ -2315,11 +2372,15 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
           Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
 
+          If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in
+          android.control.aeAvailableModes), android.control.aeState must be FLASH_REQUIRED after
+          the camera device finishes AE scan and it's too dark without flash.
+
           For the above table, the camera device may skip reporting any state changes that happen
           without application intervention (i.e. mode switch, trigger, locking). Any state that
           can be skipped in that manner is called a transient state.
 
-          For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
+          For example, for above AE modes (AE_MODE_ON*), in addition to the state transitions
           listed in above table, it is also legal for the camera device to skip one or more
           transient states between two results. See below table for examples:
 
@@ -2512,6 +2573,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
           <description>The ID sent with the latest
           CAMERA2_TRIGGER_AUTOFOCUS call</description>
+          <deprecation_description>
+            Removed in camera HAL v3
+          </deprecation_description>
           <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
           received yet by HAL. Always updated even if AF algorithm
           ignores the trigger</details>
@@ -2749,10 +2813,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             list `(100, 100)` in this key.
           </details>
           <hal_details>
-             This key is added in HAL3.4. For HAL3.3 or earlier devices, camera framework will
-             generate this key as `(100, 100)` if device supports any of RAW output formats.
-             All HAL3.4 and above devices should list this key if device supports any of RAW
-             output formats.
+             This key is added in legacy HAL3.4. For legacy HAL3.3 or earlier devices, camera
+             framework will generate this key as `(100, 100)` if device supports any of RAW output
+             formats.  All legacy HAL3.4 and above devices should list this key if device supports
+             any of RAW output formats.
           </hal_details>
         </entry>
       </static>
@@ -2843,6 +2907,25 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
       <dynamic>
         <clone entry="android.control.enableZsl" kind="controls">
         </clone>
+        <entry name="afSceneChange" type="byte" visibility="public" enum="true" hal_version="3.3">
+          <enum>
+            <value>NOT_DETECTED
+            <notes>Scene change is not detected within the AF region(s).</notes></value>
+            <value>DETECTED
+            <notes>Scene change is detected within the AF region(s).</notes></value>
+          </enum>
+          <description>Whether a significant scene change is detected within the currently-set AF
+          region(s).</description>
+          <details>When the camera focus routine detects a change in the scene it is looking at,
+          such as a large shift in camera viewpoint, significant motion in the scene, or a
+          significant illumination change, this value will be set to DETECTED for a single capture
+          result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar
+          to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes.
+
+          This key will be available if the camera device advertises this key via {@link
+          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
+          </details>
+        </entry>
       </dynamic>
     </section>
     <section name="demosaic">
@@ -2875,14 +2958,14 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             <value>HIGH_QUALITY
             <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
             </notes></value>
-            <value optional="true">ZERO_SHUTTER_LAG
-            <notes>Edge enhancement is applied at different levels for different output streams,
-            based on resolution. Streams at maximum recording resolution (see {@link
-            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have
-            edge enhancement applied, while higher-resolution streams have no edge enhancement
-            applied. The level of edge enhancement for low-resolution streams is tuned so that
-            frame rate is not impacted, and the quality is equal to or better than FAST (since it
-            is only applied to lower-resolution outputs, quality may improve from FAST).
+            <value optional="true">ZERO_SHUTTER_LAG <notes>Edge enhancement is applied at different
+            levels for different output streams, based on resolution. Streams at maximum recording
+            resolution (see {@link
+            android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession})
+            or below have edge enhancement applied, while higher-resolution streams have no edge
+            enhancement applied. The level of edge enhancement for low-resolution streams is tuned
+            so that frame rate is not impacted, and the quality is equal to or better than FAST
+            (since it is only applied to lower-resolution outputs, quality may improve from FAST).
 
             This mode is intended to be used by applications operating in a zero-shutter-lag mode
             with YUV or PRIVATE reprocessing, where the application continuously captures
@@ -3254,8 +3337,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           Note that this orientation is relative to the orientation of the camera sensor, given
           by android.sensor.orientation.
 
-          To translate from the device orientation given by the Android sensor APIs, the following
-          sample code may be used:
+          To translate from the device orientation given by the Android sensor APIs for camera
+          sensors which are not EXTERNAL, the following sample code may be used:
 
               private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
                   if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
@@ -3274,6 +3357,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
                   return jpegOrientation;
               }
+
+          For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
+          also be set to EXTERNAL. The above code is not relevant in such case.
           </details>
           <tag id="BC" />
         </entry>
@@ -3799,36 +3885,33 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <units>Meters</units>
           <details>
             The position of the camera device's lens optical center,
-            as a three-dimensional vector `(x,y,z)`, relative to the
-            optical center of the largest camera device facing in the
-            same direction as this camera, in the {@link
-            android.hardware.SensorEvent Android sensor coordinate
-            axes}. Note that only the axis definitions are shared with
-            the sensor coordinate system, but not the origin.
-
-            If this device is the largest or only camera device with a
-            given facing, then this position will be `(0, 0, 0)`; a
-            camera device with a lens optical center located 3 cm from
-            the main sensor along the +X axis (to the right from the
-            user's perspective) will report `(0.03, 0, 0)`.
-
-            To transform a pixel coordinates between two cameras
-            facing the same direction, first the source camera
-            android.lens.radialDistortion must be corrected for.  Then
-            the source camera android.lens.intrinsicCalibration needs
-            to be applied, followed by the android.lens.poseRotation
-            of the source camera, the translation of the source camera
-            relative to the destination camera, the
-            android.lens.poseRotation of the destination camera, and
-            finally the inverse of android.lens.intrinsicCalibration
-            of the destination camera. This obtains a
-            radial-distortion-free coordinate in the destination
-            camera pixel coordinates.
-
-            To compare this against a real image from the destination
-            camera, the destination camera image then needs to be
-            corrected for radial distortion before comparison or
-            sampling.
+            as a three-dimensional vector `(x,y,z)`.
+
+            Prior to Android P, or when android.lens.poseReference is PRIMARY_CAMERA, this position
+            is relative to the optical center of the largest camera device facing in the same
+            direction as this camera, in the {@link android.hardware.SensorEvent Android sensor
+            coordinate axes}. Note that only the axis definitions are shared with the sensor
+            coordinate system, but not the origin.
+
+            If this device is the largest or only camera device with a given facing, then this
+            position will be `(0, 0, 0)`; a camera device with a lens optical center located 3 cm
+            from the main sensor along the +X axis (to the right from the user's perspective) will
+            report `(0.03, 0, 0)`.
+
+            To transform pixel coordinates between two cameras facing the same direction, first
+            the source camera android.lens.distortion must be corrected for.  Then the source
+            camera android.lens.intrinsicCalibration needs to be applied, followed by the
+            android.lens.poseRotation of the source camera, the translation of the source camera
+            relative to the destination camera, the android.lens.poseRotation of the destination
+            camera, and finally the inverse of android.lens.intrinsicCalibration of the destination
+            camera. This obtains a radial-distortion-free coordinate in the destination camera pixel
+            coordinates.
+
+            To compare this against a real image from the destination camera, the destination camera
+            image then needs to be corrected for radial distortion before comparison or sampling.
+
+            When android.lens.poseReference is GYROSCOPE, then this position is relative to
+            the center of the primary gyroscope on the device.
           </details>
           <tag id="DEPTH" />
         </entry>
@@ -3973,7 +4056,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             where `(0,0)` is the top-left of the
             preCorrectionActiveArraySize rectangle. Once the pose and
             intrinsic calibration transforms have been applied to a
-            world point, then the android.lens.radialDistortion
+            world point, then the android.lens.distortion
             transform needs to be applied, and the result adjusted to
             be in the android.sensor.info.activeArraySize coordinate
             system (where `(0, 0)` is the top-left of the
@@ -3984,7 +4067,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <tag id="DEPTH" />
         </entry>
         <entry name="radialDistortion" type="float" visibility="public"
-               container="array">
+               deprecated="true" container="array">
           <array>
             <size>6</size>
           </array>
@@ -3992,6 +4075,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             The correction coefficients to correct for this camera device's
             radial and tangential lens distortion.
           </description>
+          <deprecation_description>
+            This field was inconsistently defined in terms of its
+            normalization. Use android.lens.distortion instead.
+          </deprecation_description>
           <units>
             Unitless coefficients.
           </units>
@@ -4036,6 +4123,82 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         <clone entry="android.lens.radialDistortion" kind="static">
         </clone>
       </dynamic>
+      <static>
+        <entry name="poseReference" type="byte" visibility="public" enum="true" hal_version="3.3">
+          <enum>
+            <value>PRIMARY_CAMERA
+            <notes>The value of android.lens.poseTranslation is relative to the optical center of
+            the largest camera device facing the same direction as this camera.
+
+            This is the default value for API levels before Android P.
+            </notes>
+            </value>
+            <value>GYROSCOPE
+            <notes>The value of android.lens.poseTranslation is relative to the position of the
+            primary gyroscope of this Android device.
+            </notes>
+            </value>
+          </enum>
+          <description>
+            The origin for android.lens.poseTranslation.
+          </description>
+          <details>
+            Different calibration methods and use cases can produce better or worse results
+            depending on the selected coordinate origin.
+          </details>
+        </entry>
+        <entry name="distortion" type="float" visibility="public" container="array"
+               hal_version="3.3">
+          <array>
+            <size>5</size>
+          </array>
+          <description>
+            The correction coefficients to correct for this camera device's
+            radial and tangential lens distortion.
+
+            Replaces the deprecated android.lens.radialDistortion field, which was
+            inconsistently defined.
+          </description>
+          <units>
+            Unitless coefficients.
+          </units>
+          <details>
+            Three radial distortion coefficients `[kappa_1, kappa_2,
+            kappa_3]` and two tangential distortion coefficients
+            `[kappa_4, kappa_5]` that can be used to correct the
+            lens's geometric distortion with the mapping equations:
+
+                 x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+                 y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+
+            Here, `[x_c, y_c]` are the coordinates to sample in the
+            input image that correspond to the pixel values in the
+            corrected image at the coordinate `[x_i, y_i]`:
+
+                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+
+            The pixel coordinates are defined in a coordinate system
+            related to the android.lens.intrinsicCalibration
+            calibration fields; see that entry for details of the mapping stages.
+            Both `[x_i, y_i]` and `[x_c, y_c]`
+            have `(0,0)` at the lens optical center `[c_x, c_y]`, and
+            the range of the coordinates depends on the focal length
+            terms of the intrinsic calibration.
+
+            Finally, `r` represents the radial distance from the
+            optical center, `r^2 = x_i^2 + y_i^2`.
+
+            The distortion model used is the Brown-Conrady model.
+          </details>
+          <tag id="DEPTH" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.lens.distortion" kind="static">
+        </clone>
+      </dynamic>
     </section>
     <section name="noiseReduction">
       <controls>
@@ -4057,12 +4220,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
             <notes>Noise reduction is applied at different levels for different output streams,
             based on resolution. Streams at maximum recording resolution (see {@link
-            android.hardware.camera2.CameraDevice#createCaptureSession}) or below have noise
-            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
-            noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
-            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
-            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
-            quality may improve from FAST).
+            android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession})
+            or below have noise reduction applied, while higher-resolution streams have MINIMAL (if
+            supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of
+            noise reduction for low-resolution streams is tuned so that frame rate is not impacted,
+            and the quality is equal to or better than FAST (since it is only applied to
+            lower-resolution outputs, quality may improve from FAST).
 
             This mode is intended to be used by applications operating in a zero-shutter-lag mode
             with YUV or PRIVATE reprocessing, where the application continuously captures
@@ -4171,6 +4334,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           scale 'normalized' coordinates with respect to the crop
           region. This applies to metering input (a{e,f,wb}Region
           and output (face rectangles).</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <details>Normalized coordinates refer to those in the
           (-1000,1000) range mentioned in the
           android.hardware.Camera API.
@@ -4184,6 +4350,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>If set to 1, then the camera service always
           switches to FOCUS_MODE_AUTO before issuing a AF
           trigger.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <details>HAL implementations should implement AF trigger
           modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
           CONTINUOUS_PICTURE modes instead of using this flag. Does
@@ -4195,6 +4364,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           CAMERA2_PIXEL_FORMAT_ZSL instead of
           HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
           shutter lag stream</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <details>HAL implementations should use gralloc usage flags
           to determine that a stream will be used for
           zero-shutter-lag, instead of relying on an explicit
@@ -4208,6 +4380,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           metadata for a single capture into multiple buffers,
           returned using multiple process_capture_result calls.
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer; replaced by better partials mechanism
+          </deprecation_description>
           <details>
           Does not need to be listed in static
           metadata. Support for partial results will be reworked in
@@ -4240,6 +4415,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           final one for the capture, or only a partial that contains a
           subset of the full set of dynamic metadata
           values.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <range>Optional. Default value is FINAL.</range>
           <details>
           The entries in the result metadata buffers for a
@@ -4267,6 +4445,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           increases with every new result (that is, each new result has a unique
           frameCount value).
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>incrementing integer</units>
           <range>Any int.</range>
         </entry>
@@ -4285,6 +4466,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           </array>
           <description>List which camera reprocess stream is used
           for the source of reprocessing data.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>List of camera reprocess stream IDs</units>
           <range>
           Typically, only one entry allowed, must be a valid reprocess stream ID.
@@ -4321,6 +4505,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           </array>
           <description>Lists which camera output streams image data
           from this capture must be sent to</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>List of camera stream IDs</units>
           <range>List must only include streams that have been
           created</range>
@@ -4343,8 +4530,11 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             needed for reprocessing with [RP]</notes></value>
           </enum>
           <description>The type of the request; either CAPTURE or
-          REPROCESS. For HAL3, this tag is redundant.
+          REPROCESS. For legacy HAL3, this tag is redundant.
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <tag id="HAL2" />
         </entry>
       </controls>
@@ -4381,14 +4571,14 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           into the 3 stream types as below:
 
           * Processed (but stalling): any non-RAW format with a stallDurations &amp;gt; 0.
-            Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.
-          * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link
-            android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12
-            RAW12}.
-          * Processed (but not-stalling): any non-RAW format without a stall duration.
-            Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
-            {@link android.graphics.ImageFormat#NV21 NV21}, or
-            {@link android.graphics.ImageFormat#YV12 YV12}.
+            Typically {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format}.
+          * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16
+            RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}, or
+            {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}.
+          * Processed (but not-stalling): any non-RAW format without a stall duration.  Typically
+            {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888},
+            {@link android.graphics.ImageFormat#NV21 NV21}, or {@link
+            android.graphics.ImageFormat#YV12 YV12}.
           </details>
           <tag id="BC" />
         </entry>
@@ -4412,9 +4602,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
           In particular, a `RAW` format is typically one of:
 
-          * {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}
-          * {@link android.graphics.ImageFormat#RAW10 RAW10}
-          * {@link android.graphics.ImageFormat#RAW12 RAW12}
+          * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR}
+          * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}
+          * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}
 
           LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
           never support raw streams.
@@ -4443,7 +4633,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
           Typically:
 
-          * {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}
+          * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
           * {@link android.graphics.ImageFormat#NV21 NV21}
           * {@link android.graphics.ImageFormat#YV12 YV12}
           * Implementation-defined formats, i.e. {@link
@@ -4475,8 +4665,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
 
           A processed and stalling format is defined as any non-RAW format with a stallDurations
-          &amp;gt; 0.  Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
-          stalling format.
+          &amp;gt; 0.  Typically only the {@link
+          android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format} is a stalling format.
 
           For full guarantees, query {@link
           android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
@@ -4492,6 +4682,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           </array>
           <description>How many reprocessing streams of any type
           can be allocated at the same time.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <range>&amp;gt;= 0</range>
           <details>
           Only used by HAL2.x.
@@ -4500,7 +4693,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           </details>
           <tag id="HAL2" />
         </entry>
-        <entry name="maxNumInputStreams" type="int32" visibility="public" hwlevel="full">
+        <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
           <description>
           The maximum numbers of any type of input streams
           that can be configured and used simultaneously by a camera device.
@@ -4535,6 +4728,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>A frame counter set by the framework. This value monotonically
           increases with every new result (that is, each new result has a unique
           frameCount value).</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>count of frames</units>
           <range>&amp;gt; 0</range>
           <details>Reset on release()</details>
@@ -4784,7 +4980,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
               per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
               resolution of the device, whichever is smaller.
-
+              </notes>
+              <sdk_notes>
               More specifically, this means that a size matching the camera device's active array
               size is listed as a supported size for the {@link
               android.graphics.ImageFormat#YUV_420_888} format in either {@link
@@ -4800,9 +4997,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
               s.
 
-              If the device supports the {@link android.graphics.ImageFormat#RAW10}, {@link
-              android.graphics.ImageFormat#RAW12}, then those can also be captured at the same rate
-              as the maximum-size YUV_420_888 resolution is.
+              If the device supports the {@link
+              android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
+              android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those can also be
+              captured at the same rate as the maximum-size YUV_420_888 resolution is.
 
               If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
               as for the YUV_420_888 format also apply to the {@link
@@ -4812,7 +5010,31 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
               are also guaranteed to be `true` so burst capture with these two locks ON yields
               consistent image output.
-              </notes>
+              </sdk_notes>
+              <ndk_notes>
+              More specifically, this means that at least one output {@link
+              android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
+              {@link
+              android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
+              is larger or equal to the 'high resolution' defined above, and can be captured at at
+              least 20 fps.  For the largest {@link
+              android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
+              {@link
+              android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
+              camera device can capture this size for at least 10 frames per second.  Also the
+              android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range where
+              the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.
+
+              If the device supports the {@link
+              android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
+              android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, then those can also be
+              captured at the same rate as the maximum-size YUV_420_888 resolution is.
+
+              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
+              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
+              are also guaranteed to be `true` so burst capture with these two locks ON yields
+              consistent image output.
+              </ndk_notes>
             </value>
             <value optional="true" ndk_hidden="true">YUV_REPROCESSING
               <notes>
@@ -4821,10 +5043,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               following:
 
               * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
-              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
-                YUV_420_888 is included in the lists of formats returned by
-                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
-                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
+              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input
+                format, that is, YUV_420_888 is included in the lists of formats returned by {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
+                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
               * {@link
                 android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
                 returns non-empty int[] for each supported input format returned by {@link
@@ -4834,8 +5056,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
                 getInputSizes(YUV_420_888)} is also included in {@link
                 android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
                 getOutputSizes(YUV_420_888)}
-              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
-                relative to the sensor's maximum capture rate (at that resolution).
+              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate
+                drop relative to the sensor's maximum capture rate (at that resolution).
               * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
                 {@link android.graphics.ImageFormat#YUV_420_888} and {@link
                 android.graphics.ImageFormat#JPEG} formats.
@@ -4844,10 +5066,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
                 maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
               * Static metadata android.reprocess.maxCaptureStall.
               * Only the below controls are effective for reprocessing requests and will be present
-                in capture results. The reprocess requests are from the original capture results that
-                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
-                output buffers.  All other controls in the reprocess requests will be ignored by the
-                camera device.
+                in capture results. The reprocess requests are from the original capture results
+                that are associated with the intermediate {@link
+                android.graphics.ImageFormat#YUV_420_888} output buffers.  All other controls in the
+                reprocess requests will be ignored by the camera device.
                     * android.jpeg.*
                     * android.noiseReduction.mode
                     * android.edge.mode
@@ -4862,26 +5084,30 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
               This capability requires the camera device to support the following:
 
-              * {@link android.graphics.ImageFormat#DEPTH16} is supported as an output format.
-              * {@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD} is optionally supported as an
-                output format.
-              * This camera device, and all camera devices with the same android.lens.facing,
-                will list the following calibration entries in both
-                {@link android.hardware.camera2.CameraCharacteristics} and
-                {@link android.hardware.camera2.CaptureResult}:
+              * {@link android.graphics.ImageFormat#DEPTH16|AIMAGE_FORMAT_DEPTH16} is supported as
+                an output format.
+              * {@link
+                android.graphics.ImageFormat#DEPTH_POINT_CLOUD|AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is
+                optionally supported as an output format.
+              * This camera device, and all camera devices with the same android.lens.facing, will
+                list the following calibration metadata entries in both {@link
+                android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}
+                and {@link
+                android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}:
                   - android.lens.poseTranslation
                   - android.lens.poseRotation
                   - android.lens.intrinsicCalibration
-                  - android.lens.radialDistortion
+                  - android.lens.distortion
               * The android.depth.depthIsExclusive entry is listed by this device.
+              * As of Android P, the android.lens.poseReference entry is listed by this device.
               * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
                 normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
                 format.
 
               Generally, depth output operates at a slower frame rate than standard color capture,
               so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
-              should be accounted for (see
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}).
+              should be accounted for (see {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
               On a device that supports both depth and color-based output, to enable smooth preview,
               using a repeating burst is recommended, where a depth-output target is only included
               once every N frames, where N is the ratio between preview output rate and depth output
@@ -4890,25 +5116,25 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             </value>
             <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
               <notes>
-              The device supports constrained high speed video recording (frame rate >=120fps)
-              use case. The camera device will support high speed capture session created by
-              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
-              only accepts high speed request lists created by
-              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
-
-              A camera device can still support high speed video streaming by advertising the high speed
-              FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all normal
-              capture request per frame control and synchronization requirements will apply to
-              the high speed fps ranges, the same as all other fps ranges. This capability describes
-              the capability of a specialized operating mode with many limitations (see below), which
-              is only targeted at high speed video recording.
-
-              The supported high speed video sizes and fps ranges are specified in
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
-              To get desired output frame rates, the application is only allowed to select video size
-              and FPS range combinations provided by
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
-              The fps range can be controlled via android.control.aeTargetFpsRange.
+              The device supports constrained high speed video recording (frame rate >=120fps) use
+              case. The camera device will support high speed capture session created by {@link
+              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
+              only accepts high speed request lists created by {@link
+              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+
+              A camera device can still support high speed video streaming by advertising the high
+              speed FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all
+              normal capture request per frame control and synchronization requirements will apply
+              to the high speed fps ranges, the same as all other fps ranges. This capability
+              describes the capability of a specialized operating mode with many limitations (see
+              below), which is only targeted at high speed video recording.
+
+              The supported high speed video sizes and fps ranges are specified in {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+              To get desired output frame rates, the application is only allowed to select video
+              size and FPS range combinations provided by {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.  The
+              fps range can be controlled via android.control.aeTargetFpsRange.
 
               In this capability, the camera device will override aeMode, awbMode, and afMode to
               ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
@@ -4945,19 +5171,20 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               rate will be bounded by the screen refresh rate.
 
               The camera device will only support up to 2 high speed simultaneous output surfaces
-              (preview and recording surfaces)
-              in this mode. Above controls will be effective only if all of below conditions are true:
+              (preview and recording surfaces) in this mode. Above controls will be effective only
+              if all of below conditions are true:
 
               * The application creates a camera capture session with no more than 2 surfaces via
-              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
-              targeted surfaces must be preview surface (either from
-              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
-              recording surface(either from {@link android.media.MediaRecorder#getSurface} or
-              {@link android.media.MediaCodec#createInputSurface}).
+              {@link
+              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
+              targeted surfaces must be preview surface (either from {@link
+              android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or recording
+              surface (either from {@link android.media.MediaRecorder#getSurface} or {@link
+              android.media.MediaCodec#createInputSurface}).
               * The stream sizes are selected from the sizes reported by
               {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
-              * The FPS ranges are selected from
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
+              * The FPS ranges are selected from {@link
+              android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
 
               When above conditions are NOT satistied,
               {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
@@ -4969,6 +5196,65 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               during high speed streaming.
               </notes>
             </value>
+            <value optional="true" hal_version="3.3" >MOTION_TRACKING
+              <notes>
+              The camera device supports the MOTION_TRACKING value for
+              android.control.captureIntent, which limits maximum exposure time to 20 ms.
+
+              This limits the motion blur of capture images, resulting in better image tracking
+              results for use cases such as image stabilization or augmented reality.
+              </notes>
+            </value>
+            <value optional="true" hal_version="3.3">LOGICAL_MULTI_CAMERA
+              <notes>
+              The camera device is a logical camera backed by two or more physical cameras that are
+              also exposed to the application.
+
+              This capability requires the camera device to support the following:
+
+              * This camera device must list the following static metadata entries in {@link
+                android.hardware.camera2.CameraCharacteristics}:
+                  - android.logicalMultiCamera.physicalIds
+                  - android.logicalMultiCamera.sensorSyncType
+              * The underlying physical cameras' static metadata must list the following entries,
+                so that the application can correlate pixels from the physical streams:
+                  - android.lens.poseReference
+                  - android.lens.poseRotation
+                  - android.lens.poseTranslation
+                  - android.lens.intrinsicCalibration
+                  - android.lens.distortion
+              * The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be
+                the same.
+              * The logical camera device must be LIMITED or higher device.
+
+              Both the logical camera device and its underlying physical devices support the
+              mandatory stream combinations required for their device levels.
+
+              Additionally, for each guaranteed stream combination, the logical camera supports:
+
+              * For each guaranteed stream combination, the logical camera supports replacing one
+                logical {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888}
+                or raw stream with two physical streams of the same size and format, each from a
+                separate physical camera, given that the size and format are supported by both
+                physical cameras.
+              * If the logical camera doesn't advertise RAW capability, but the underlying physical
+                cameras do, the logical camera will support guaranteed stream combinations for RAW
+                capability, except that the RAW streams will be physical streams, each from a separate
+                physical camera. This is usually the case when the physical cameras have different
+                sensor sizes.
+
+              Using physical streams in place of a logical stream of the same size and format will
+              not slow down the frame rate of the capture, as long as the minimum frame duration
+              of the physical and logical streams are the same.
+              </notes>
+            </value>
+            <value optional="true" hal_version="3.3" >MONOCHROME
+              <notes>
+              The camera device is a monochrome camera that doesn't contain a color filter array,
+              and the pixel values on U and V planes are all 128.
+              </notes>
+            </value>
+
           </enum>
           <description>List of capabilities that this camera device
           advertises as fully supporting.</description>
@@ -5034,6 +5320,27 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           enum notes. The entry android.depth.maxDepthSamples must be available
           if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
           DEPTH).
+
+          For a camera device with LOGICAL_MULTI_CAMERA capability, it should operate in the
+          same way as a physical camera device based on its hardware level and capabilities.
+          It's recommended that its feature set is a superset of that of individual physical cameras.
+
+          For MONOCHROME, the camera device must also advertise BACKWARD_COMPATIBLE capability, and
+          it is exclusive of both RAW and MANUAL_POST_PROCESSING capabilities:
+
+          * To maintain backward compatibility, the camera device must support all
+          BACKWARD_COMPATIBLE required keys. The android.control.awbAvailableModes key only contains
+          AUTO, and android.control.awbState are either CONVERGED or LOCKED depending on
+          android.control.awbLock.
+
+          * A monochrome device doesn't need to advertise DNG related optional metadata tags.
+
+          * android.colorCorrection.mode, android.colorCorrection.transform, and
+          android.colorCorrection.gains are not applicable. So the camera device cannot
+          be a FULL device. However, the HAL can still advertise other individual capabilities.
+
+          * If the device supports tonemap control, only android.tonemap.curveRed is used.
+          CurveGreen and curveBlue are no-ops.
           </hal_details>
         </entry>
         <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
@@ -5042,7 +5349,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             <size>n</size>
           </array>
           <description>A list of all keys that the camera device has available
-          to use with {@link android.hardware.camera2.CaptureRequest}.</description>
+          to use with {@link android.hardware.camera2.CaptureRequest|ACaptureRequest}.</description>
 
           <details>Attempting to set a key into a CaptureRequest that is not
           listed here will result in an invalid request and will be rejected
@@ -5073,8 +5380,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <array>
             <size>n</size>
           </array>
-          <description>A list of all keys that the camera device has available
-          to use with {@link android.hardware.camera2.CaptureResult}.</description>
+          <description>A list of all keys that the camera device has available to use with {@link
+          android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}.
+          </description>
 
           <details>Attempting to get a key from a CaptureResult that is not
           listed here will always return a `null` value. Getting a key from
@@ -5116,8 +5424,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <array>
             <size>n</size>
           </array>
-          <description>A list of all keys that the camera device has available
-          to use with {@link android.hardware.camera2.CameraCharacteristics}.</description>
+          <description>A list of all keys that the camera device has available to use with {@link
+          android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}.
+          </description>
           <details>This entry follows the same rules as
           android.request.availableResultKeys (except that it applies for
           CameraCharacteristics instead of CaptureResult). See above for more
@@ -5141,6 +5450,104 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
           </hal_details>
         </entry>
+        <entry name="availableSessionKeys" type="int32" visibility="ndk_public"
+               container="array" hwlevel="legacy" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A subset of the available request keys that the camera device
+          can pass as part of the capture session initialization.</description>
+
+          <details> This is a subset of android.request.availableRequestKeys which
+          contains a list of keys that are difficult to apply per-frame and
+          can result in unexpected delays when modified during the capture session
+          lifetime. Typical examples include parameters that require a
+          time-consuming hardware re-configuration or internal camera pipeline
+          change. For performance reasons we advise clients to pass their initial
+          values as part of
+          {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
+          Once the camera capture session is enabled it is also recommended to avoid
+          changing them from their initial values set in
+          {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
+          Control over session parameters can still be exerted in capture requests
+          but clients should be aware and expect delays during their application.
+          An example usage scenario could look like this:
+
+          * The camera client starts by querying the session parameter key list via
+            {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}.
+          * Before triggering the capture session create sequence, a capture request
+            must be built via
+            {@link CameraDevice#createCaptureRequest|ACameraDevice_createCaptureRequest}
+            using an appropriate template matching the particular use case.
+          * The client should go over the list of session parameters and check
+            whether some of the keys listed matches with the parameters that
+            they intend to modify as part of the first capture request.
+          * If there is no such match, the capture request can be passed
+            unmodified to
+            {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
+          * If matches do exist, the client should update the respective values
+            and pass the request to
+            {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}.
+          * After the capture session initialization completes the session parameter
+            key list can continue to serve as reference when posting or updating
+            further requests. As mentioned above further changes to session
+            parameters should ideally be avoided, if updates are necessary
+            however clients could expect a delay/glitch during the
+            parameter switch.
+
+          </details>
+          <hal_details>
+          Vendor tags can be listed here. Vendor tag metadata should also
+          use the extensions C api (refer to
+          android.hardware.camera.device.V3_4.StreamConfiguration.sessionParams for more details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not consume any request tags in the session parameters that
+          are not listed either here or in the vendor tag list.
+
+          The public camera2 API will always make the vendor tags visible
+          via
+          {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}.
+          </hal_details>
+        </entry>
+        <entry name="availablePhysicalCameraRequestKeys" type="int32" visibility="hidden"
+               container="array" hwlevel="limited" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>A subset of the available request keys that can be overridden for
+          physical devices backing a logical multi-camera.</description>
+          <details>
+          This is a subset of android.request.availableRequestKeys which contains a list
+          of keys that can be overridden using {@link CaptureRequest.Builder#setPhysicalCameraKey}.
+          The respective value of such request key can be obtained by calling
+          {@link CaptureRequest.Builder#getPhysicalCameraKey}. Capture requests that contain
+          individual physical device requests must be built via
+          {@link android.hardware.camera2.CameraDevice#createCaptureRequest(int, Set)}.
+          </details>
+          <hal_details>
+          Vendor tags can be listed here. Vendor tag metadata should also
+          use the extensions C api (refer to
+          android.hardware.camera.device.V3_4.CaptureRequest.physicalCameraSettings for more
+          details).
+
+          Setting/getting vendor tags will be checked against the metadata
+          vendor extensions API and not against this field.
+
+          The HAL must not consume any request tags in the session parameters that
+          are not listed either here or in the vendor tag list.
+
+          There should be no overlap between this set of keys and the available session keys
+          {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys} along
+          with any other controls that can have impact on the dual-camera sync.
+
+          The public camera2 API will always make the vendor tags visible
+          via
+          {@link android.hardware.camera2.CameraCharacteristics#getAvailablePhysicalCameraRequestKeys}.
+          </hal_details>
+        </entry>
       </static>
     </section>
     <section name="scaler">
@@ -5193,6 +5600,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             crop region used will be included in the output capture
             result.
           </details>
+          <ndk_details>
+            The data representation is int[4], which maps to (left, top, width, height).
+          </ndk_details>
           <hal_details>
             The output streams must maintain square pixels at all
             times, no matter what the relative aspect ratios of the
@@ -5317,6 +5727,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           </enum>
           <description>The list of image formats that are supported by this
           camera device for output streams.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <details>
           All camera devices will support JPEG and YUV_420_888 formats.
 
@@ -5352,6 +5765,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>The minimum frame duration that is supported
           for each resolution in android.scaler.availableJpegSizes.
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>Nanoseconds</units>
           <range>TODO: Remove property.</range>
           <details>
@@ -5371,6 +5787,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             <size>2</size>
           </array>
           <description>The JPEG resolutions that are supported by this camera device.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <range>TODO: Remove property.</range>
           <details>
           The resolutions are listed as `(width, height)` pairs. All camera devices will support
@@ -5411,6 +5830,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           android.scaler.availableProcessedSizes), this property lists the
           minimum supportable frame duration for that size.
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>Nanoseconds</units>
           <details>
           This should correspond to the frame duration when only that processed
@@ -5432,6 +5854,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           processed output streams, such as YV12, NV12, and
           platform opaque YUV/RGB streams to the GPU or video
           encoders.</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <details>
           The resolutions are listed as `(width, height)` pairs.
 
@@ -5471,6 +5896,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           android.scaler.availableRawSizes), this property lists the minimum
           supportable frame duration for that size.
           </description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
           <units>Nanoseconds</units>
           <details>
           Should correspond to the frame duration when only the raw stream is
@@ -5490,6 +5918,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>The resolutions available for use with raw
           sensor output streams, listed as width,
           height</description>
+          <deprecation_description>
+          Not used in HALv3 or newer
+          </deprecation_description>
         </entry>
       </static>
       <dynamic>
@@ -5698,9 +6129,6 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           See android.sensor.frameDuration and
           android.scaler.availableStallDurations for more details about
           calculating the max frame rate.
-
-          (Keep in sync with
-          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
           </details>
           <tag id="V1" />
         </entry>
@@ -5763,13 +6191,14 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
           The following formats may always have a stall duration:
 
-          * {@link android.graphics.ImageFormat#JPEG}
-          * {@link android.graphics.ImageFormat#RAW_SENSOR}
+          * {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG}
+          * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16}
 
           The following formats will never have a stall duration:
 
-          * {@link android.graphics.ImageFormat#YUV_420_888}
-          * {@link android.graphics.ImageFormat#RAW10}
+          * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888}
+          * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}
+          * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}
 
           All other formats may or may not have an allowed stall duration on
           a per-capability basis; refer to android.request.availableCapabilities
@@ -5777,9 +6206,6 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
           See android.sensor.frameDuration for more information about
           calculating the max frame rate (absent stalls).
-
-          (Keep up to date with
-          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} )
           </details>
           <hal_details>
           If possible, it is recommended that all non-JPEG formats
@@ -5940,9 +6366,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>Duration from start of frame exposure to
           start of next frame exposure.</description>
           <units>Nanoseconds</units>
-          <range>See android.sensor.info.maxFrameDuration,
-          android.scaler.streamConfigurationMap. The duration
-          is capped to `max(duration, exposureTime + overhead)`.</range>
+          <range>See android.sensor.info.maxFrameDuration, {@link
+          android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
+          The duration is capped to `max(duration, exposureTime + overhead)`.</range>
           <details>
           The maximum frame rate that can be supported by a camera subsystem is
           a function of many factors:
@@ -5980,43 +6406,36 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           can run concurrently to the rest of the camera pipeline, but
           cannot process more than 1 capture at a time.
 
-          The necessary information for the application, given the model above,
-          is provided via the android.scaler.streamConfigurationMap field using
-          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.
-          These are used to determine the maximum frame rate / minimum frame
-          duration that is possible for a given stream configuration.
+          The necessary information for the application, given the model above, is provided via
+          {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
+          These are used to determine the maximum frame rate / minimum frame duration that is
+          possible for a given stream configuration.
 
           Specifically, the application can use the following rules to
           determine the minimum frame duration it can request from the camera
           device:
 
-          1. Let the set of currently configured input/output streams
-          be called `S`.
-          1. Find the minimum frame durations for each stream in `S`, by looking
-          it up in android.scaler.streamConfigurationMap using {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
-          (with its respective size/format). Let this set of frame durations be
-          called `F`.
-          1. For any given request `R`, the minimum frame duration allowed
-          for `R` is the maximum out of all values in `F`. Let the streams
-          used in `R` be called `S_r`.
+          1. Let the set of currently configured input/output streams be called `S`.
+          1. Find the minimum frame durations for each stream in `S`, by looking it up in {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
+          (with its respective size/format). Let this set of frame durations be called `F`.
+          1. For any given request `R`, the minimum frame duration allowed for `R` is the maximum
+          out of all values in `F`. Let the streams used in `R` be called `S_r`.
 
           If none of the streams in `S_r` have a stall time (listed in {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}
-          using its respective size/format), then the frame duration in `F`
-          determines the steady state frame rate that the application will get
-          if it uses `R` as a repeating request. Let this special kind of
-          request be called `Rsimple`.
-
-          A repeating request `Rsimple` can be _occasionally_ interleaved
-          by a single capture of a new request `Rstall` (which has at least
-          one in-use stream with a non-0 stall time) and if `Rstall` has the
-          same minimum frame duration this will not cause a frame rate loss
-          if all buffers from the previous `Rstall` have already been
-          delivered.
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
+          using its respective size/format), then the frame duration in `F` determines the steady
+          state frame rate that the application will get if it uses `R` as a repeating request. Let
+          this special kind of request be called `Rsimple`.
 
-          For more details about stalling, see
-          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration}.
+          A repeating request `Rsimple` can be _occasionally_ interleaved by a single capture of a
+          new request `Rstall` (which has at least one in-use stream with a non-0 stall time) and if
+          `Rstall` has the same minimum frame duration this will not cause a frame rate loss if all
+          buffers from the previous `Rstall` have already been delivered.
+
+          For more details about stalling, see {@link
+          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
 
           This control is only effective if android.control.aeMode or android.control.mode is set to
           OFF; otherwise the auto-exposure algorithm will override this value.
@@ -6083,6 +6502,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             include black calibration pixels or other inactive regions, and geometric correction
             resulting in scaling or cropping may have been applied.
             </details>
+            <ndk_details>
+            The data representation is `int[4]`, which maps to `(left, top, width, height)`.
+            </ndk_details>
             <hal_details>
             This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
             &amp;gt;= `(0,0)`.
@@ -6157,7 +6579,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             durations.
 
             Refer to {@link
-            android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}
+            android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
             for the minimum frame duration values.
             </details>
             <hal_details>
@@ -6210,9 +6632,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
             If a camera device supports raw sensor formats, either this or
             android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
-            output formats listed in android.scaler.streamConfigurationMap (this depends on
-            whether or not the image sensor returns buffers containing pixels that are not
-            part of the active array region for blacklevel calibration or other purposes).
+            output formats listed in {@link
+            android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}
+            (this depends on whether or not the image sensor returns buffers containing pixels that
+            are not part of the active array region for blacklevel calibration or other purposes).
 
             Some parts of the full pixel array may not receive light from the scene,
             or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
@@ -6289,7 +6712,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             SYSTEM_TIME_BOOTTIME. Note that HAL must follow above expectation; otherwise video
             recording might suffer unexpected behavior.
 
-            Also, camera devices implements REALTIME must pass the ITS sensor fusion test which
+            Also, camera devices which implement REALTIME must pass the ITS sensor fusion test which
             tests the alignment between camera timestamps and gyro sensor timestamps.
             </hal_details>
           <tag id="V1" />
@@ -6367,7 +6790,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
             The currently supported fields that correct for geometric distortion are:
 
-            1. android.lens.radialDistortion.
+            1. android.lens.distortion.
 
             If all of the geometric distortion fields are no-ops, this rectangle will be the same
             as the post-distortion-corrected rectangle given in
@@ -6380,6 +6803,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             The pre-correction active array may be smaller than the full pixel array, since the
             full array may include black calibration pixels or other inactive regions.
             </details>
+            <ndk_details>
+            The data representation is `int[4]`, which maps to `(left, top, width, height)`.
+            </ndk_details>
             <hal_details>
             This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
             &amp;gt;= `(0,0)`.
@@ -7215,6 +7641,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             When this key is reported, the android.sensor.dynamicBlackLevel and
             android.sensor.dynamicWhiteLevel will also be reported.
           </details>
+          <ndk_details>
+            The data representation is `int[4]`, which maps to `(left, top, width, height)`.
+          </ndk_details>
           <hal_details>
             This array contains (xmin, ymin, width, height). The (xmin, ymin)
             must be &amp;gt;= (0,0) and &amp;lt;=
@@ -7267,9 +7696,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           nth value given corresponds to the black level offset for the nth
           color channel listed in the CFA.
 
-          This key will be available if android.sensor.opticalBlackRegions is
-          available or the camera device advertises this key via
-          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
+          This key will be available if android.sensor.opticalBlackRegions is available or the
+          camera device advertises this key via {@link
+          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
           </details>
           <hal_details>
           The values are given in row-column scan order, with the first value
@@ -7292,7 +7721,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
           This key will be available if android.sensor.opticalBlackRegions is
           available or the camera device advertises this key via
-          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
+          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
           </details>
           <hal_details>
           The full bit depth of the sensor must be available in the raw data,
@@ -7319,11 +7748,12 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           this key.
           </details>
           <hal_details>
-              This key is added in HAL3.4.
-              For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
-              For HAL3.3 or earlier devices: if RAW_OPAQUE ouput is advertised, camera framework
-              will derive this key by assuming each pixel takes two bytes and no padding bytes
-              between rows.
+          This key is added in legacy HAL3.4.
+
+          For legacy HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this
+          key.  For legacy HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, camera
+          framework will derive this key by assuming each pixel takes two bytes and no padding bytes
+          between rows.
           </hal_details>
         </entry>
       </static>
@@ -7571,6 +8001,21 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             LEGACY mode devices will always only support OFF.
             </details>
           </entry>
+          <entry name="availableOisDataModes" type="byte" visibility="public"
+                 type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3">
+            <array>
+              <size>n</size>
+            </array>
+            <description>
+            List of OIS data output modes for android.statistics.oisDataMode that
+            are supported by this camera device.
+            </description>
+            <range>Any value listed in android.statistics.oisDataMode</range>
+            <details>
+            If no OIS data output is available for this camera device, this key will
+            contain only OFF.
+            </details>
+          </entry>
         </namespace>
       </static>
       <dynamic>
@@ -7620,6 +8065,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
             `(0, 0)` being the top-left pixel of the active array.
 
             Only available if android.statistics.faceDetectMode != OFF</details>
+          <ndk_details>
+            The data representation is `int[4]`, which maps to `(left, top, width, height)`.
+          </ndk_details>
           <tag id="BC" />
         </entry>
         <entry name="faceScores" type="byte" visibility="ndk_public"
@@ -7860,6 +8308,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>The best-fit color channel gains calculated
           by the camera device's statistics units for the current output frame.
           </description>
+          <deprecation_description>
+          Never fully implemented or specified; do not use
+          </deprecation_description>
           <details>
           This may be different than the gains used for this frame,
           since statistics processing on data from a new frame
@@ -7886,6 +8337,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           <description>The best-fit color transform matrix estimate
           calculated by the camera device's statistics units for the current
           output frame.</description>
+          <deprecation_description>
+          Never fully implemented or specified; do not use
+          </deprecation_description>
           <details>The camera device will provide the estimate from its
           statistics unit on the white balance transforms to use
           for the next frame. These are the values the camera device believes
@@ -7998,6 +8452,100 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         <clone entry="android.statistics.lensShadingMapMode" kind="controls">
         </clone>
       </dynamic>
+      <controls>
+        <entry name="oisDataMode" type="byte" visibility="public" enum="true" hal_version="3.3">
+          <enum>
+            <value>OFF
+            <notes>Do not include OIS data in the capture result.</notes></value>
+            <value>ON
+            <notes>Include OIS data in the capture result.</notes>
+            <sdk_notes>android.statistics.oisSamples provides OIS sample data in the
+            output result metadata.
+            </sdk_notes>
+            <ndk_notes>android.statistics.oisTimestamps, android.statistics.oisXShifts,
+            and android.statistics.oisYShifts provide OIS data in the output result metadata.
+            </ndk_notes>
+            </value>
+          </enum>
+          <description>A control for selecting whether OIS position information is included in output
+          result metadata.</description>
+          <range>android.statistics.info.availableOisDataModes</range>
+        </entry>
+      </controls>
+      <dynamic>
+        <clone entry="android.statistics.oisDataMode" kind="controls">
+        </clone>
+        <entry name="oisTimestamps" type="int64" visibility="ndk_public" container="array" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          An array of timestamps of OIS samples, in nanoseconds.
+          </description>
+          <units>nanoseconds</units>
+          <details>
+          The array contains the timestamps of OIS samples. The timestamps are in the same
+          timebase as and comparable to android.sensor.timestamp.
+          </details>
+        </entry>
+        <entry name="oisXShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          An array of shifts of OIS samples, in x direction.
+          </description>
+          <units>Pixels in active array.</units>
+          <details>
+          The array contains the amount of shifts in x direction, in pixels, based on OIS samples.
+          A positive value is a shift from left to right in active array coordinate system. For
+          example, if the optical center is (1000, 500) in active array coordinates, a shift of
+          (3, 0) puts the new optical center at (1003, 500).
+
+          The number of shifts must match the number of timestamps in
+          android.statistics.oisTimestamps.
+          </details>
+        </entry>
+        <entry name="oisYShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          An array of shifts of OIS samples, in y direction.
+          </description>
+          <units>Pixels in active array.</units>
+          <details>
+          The array contains the amount of shifts in y direction, in pixels, based on OIS samples.
+          A positive value is a shift from top to bottom in active array coordinate system. For
+          example, if the optical center is (1000, 500) in active array coordinates, a shift of
+          (0, 5) puts the new optical center at (1000, 505).
+
+          The number of shifts must match the number of timestamps in
+          android.statistics.oisTimestamps.
+          </details>
+        </entry>
+        <entry name="oisSamples" type="float" visibility="java_public" synthetic="true"
+               container="array" typedef="oisSample" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          An array of OIS samples.
+          </description>
+          <details>
+          Each OIS sample contains the timestamp and the amount of shifts in x and y direction,
+          in pixels, of the OIS sample.
+
+          A positive value for a shift in x direction is a shift from left to right in active array
+          coordinate system. For example, if the optical center is (1000, 500) in active array
+          coordinates, a shift of (3, 0) puts the new optical center at (1003, 500).
+
+          A positive value for a shift in y direction is a shift from top to bottom in active array
+          coordinate system. For example, if the optical center is (1000, 500) in active array
+          coordinates, a shift of (0, 5) puts the new optical center at (1000, 505).
+          </details>
+        </entry>
+      </dynamic>
     </section>
     <section name="tonemap">
       <controls>
@@ -8056,6 +8604,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           always provide a curve with number of points equivalent to
           android.tonemap.maxCurvePoints).
 
+          For devices with MONOCHROME capability, only red channel is used. Green and blue channels
+          are ignored.
+
           A few examples, and their corresponding graphical mappings; these
           only specify the red channel and the precision is limited to 4
           digits, for conciseness.
@@ -8126,6 +8677,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           always provide a curve with number of points equivalent to
           android.tonemap.maxCurvePoints).
 
+          For devices with MONOCHROME capability, only red channel is used. Green and blue channels
+          are ignored.
+
           A few examples, and their corresponding graphical mappings; these
           only specify the red channel and the precision is limited to 4
           digits, for conciseness.
@@ -8413,7 +8967,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               better.
 
               Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
-              {@link android.hardware.camera2.CameraDevice#createCaptureSession
+              {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
               createCaptureSession} documentation are guaranteed to be supported.
 
               All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic
@@ -8440,7 +8994,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               This camera device is capable of supporting advanced imaging applications.
 
               The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
-              {@link android.hardware.camera2.CameraDevice#createCaptureSession
+              {@link android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
               createCaptureSession} documentation are guaranteed to be supported.
 
               A `FULL` device will support below capabilities:
@@ -8466,8 +9020,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
               This camera device is running in backward compatibility mode.
 
               Only the stream configurations listed in the `LEGACY` table in the {@link
-              android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
-              documentation are supported.
+              android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
+              createCaptureSession} documentation are supported.
 
               A `LEGACY` device does not support per-frame control, manual sensor control, manual
               post-processing, arbitrary cropping regions, and has relaxed performance constraints.
@@ -8490,8 +9044,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
               The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
               `LIMITED` tables in the {@link
-              android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
-              documentation are guaranteed to be supported.
+              android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession
+              createCaptureSession} documentation are guaranteed to be supported.
 
               The following additional capabilities are guaranteed to be supported:
 
@@ -8501,6 +9055,27 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
                 `RAW`)
               </notes>
             </value>
+            <value hal_version="3.3">
+              EXTERNAL
+              <notes>
+              This camera device is backed by an external camera connected to this Android device.
+
+              The device has capability identical to a LIMITED level device, with the following
+              exceptions:
+
+              * The device may not report lens/sensor related information such as
+                  - android.lens.focalLength
+                  - android.lens.info.hyperfocalDistance
+                  - android.sensor.info.physicalSize
+                  - android.sensor.info.whiteLevel
+                  - android.sensor.blackLevelPattern
+                  - android.sensor.info.colorFilterArrangement
+                  - android.sensor.rollingShutterSkew
+              * The device will report 0 for android.sensor.orientation
+              * The device has less guarantee on stable framerate, as the framerate partly depends
+                on the external camera being used.
+              </notes>
+            </value>
           </enum>
           <description>
           Generally classifies the overall set of the camera device functionality.
@@ -8540,7 +9115,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           See the individual level enums for full descriptions of the supported capabilities.  The
           android.request.availableCapabilities entry describes the device's capabilities at a
           finer-grain level, if needed. In addition, many controls have their available settings or
-          ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics} entries.
+          ranges defined in individual entries from {@link
+          android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}.
 
           Some features are not part of any particular hardware level or capability and must be
           queried separately. These include:
@@ -8554,7 +9130,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
 
           </details>
           <hal_details>
-          The camera 3 HAL device can implement one of three possible operational modes; LIMITED,
+          A camera HALv3 device can implement one of three possible operational modes; LIMITED,
           FULL, and LEVEL_3.
 
           FULL support or better is expected from new higher-end devices. Limited
@@ -8565,8 +9141,28 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           For full details refer to "S3. Operational Modes" in camera3.h
 
           Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
-          the `android.hardware.camera2` user-facing API only on HALv1 devices, and is implemented
-          by the camera framework code.
+          the `android.hardware.camera2` user-facing API only on legacy HALv1 devices, and is
+          implemented by the camera framework code.
+
+          EXTERNAL level devices have a lower performance bar in CTS since the performance might depend
+          on the external camera being used and is not fully controlled by the device manufacturer.
+          The ITS test suite is exempted for the same reason.
+          </hal_details>
+        </entry>
+        <entry name="version" type="byte" visibility="public" typedef="string" hal_version="3.3">
+          <description>
+              A short string for manufacturer version information about the camera device, such as
+              ISP hardware, sensors, etc.
+          </description>
+          <details>
+              This can be used in {@link android.media.ExifInterface#TAG_IMAGE_DESCRIPTION TAG_IMAGE_DESCRIPTION}
+              in jpeg EXIF. This key may be absent if no version information is available on the
+              device.
+          </details>
+          <hal_details>
+              The string must consist of only alphanumeric characters, punctuation, and
+              whitespace, i.e. it must match regular expression "[\p{Alnum}\p{Punct}\p{Space}]*".
+              It must not exceed 256 characters.
           </hal_details>
         </entry>
       </static>
@@ -8991,9 +9587,6 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
           See android.sensor.frameDuration and
           android.scaler.availableStallDurations for more details about
           calculating the max frame rate.
-
-          (Keep in sync with {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration})
           </details>
           <tag id="DEPTH" />
         </entry>
@@ -9045,5 +9638,132 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata
         </entry>
       </static>
     </section>
+    <section name="logicalMultiCamera">
+      <static>
+        <entry name="physicalIds" type="byte" visibility="hidden"
+               container="array" hwlevel="limited" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>String containing the ids of the underlying physical cameras.
+          </description>
+          <units>UTF-8 null-terminated string</units>
+          <details>
+            For a logical camera, this is concatenation of all underlying physical camera ids.
+            The null terminator for physical camera id must be preserved so that the whole string
+            can be tokenized using '\0' to generate list of physical camera ids.
+
+            For example, if the physical camera ids of the logical camera are "2" and "3", the
+            value of this tag will be ['2', '\0', '3', '\0'].
+
+            The number of physical camera ids must be no less than 2.
+          </details>
+          <tag id="LOGICALCAMERA" />
+        </entry>
+        <entry name="sensorSyncType" type="byte" visibility="public"
+               enum="true" hwlevel="limited" hal_version="3.3">
+          <enum>
+            <value>APPROXIMATE
+              <notes>
+              A software mechanism is used to synchronize between the physical cameras. As a result,
+              the timestamp of an image from a physical stream is only an approximation of the
+              image sensor start-of-exposure time.
+              </notes>
+            </value>
+            <value>CALIBRATED
+              <notes>
+              The camera device supports frame timestamp synchronization at the hardware level,
+              and the timestamp of a physical stream image accurately reflects its
+              start-of-exposure time.
+              </notes>
+            </value>
+          </enum>
+          <description>The accuracy of frame timestamp synchronization between physical cameras</description>
+          <details>
+          The accuracy of the frame timestamp synchronization determines the physical cameras'
+          ability to start exposure at the same time. If the sensorSyncType is CALIBRATED,
+          the physical camera sensors usually run in master-slave mode so that their shutter
+          time is synchronized. For APPROXIMATE sensorSyncType, the camera sensors usually run in
+          master-master mode, and there could be offset between their start of exposure.
+
+          In both cases, all images generated for a particular capture request still carry the same
+          timestamps, so that they can be used to look up the matching frame number and
+          onCaptureStarted callback.
+          </details>
+          <tag id="LOGICALCAMERA" />
+        </entry>
+      </static>
+    </section>
+    <section name="distortionCorrection">
+      <controls>
+        <entry name="mode" type="byte" visibility="public" enum="true" hal_version="3.3">
+          <enum>
+            <value>OFF
+            <notes>No distortion correction is applied.</notes></value>
+            <value>FAST <notes>Lens distortion correction is applied without reducing frame rate
+            relative to sensor output. It may be the same as OFF if distortion correction would
+            reduce frame rate relative to sensor.</notes></value>
+            <value>HIGH_QUALITY <notes>High-quality distortion correction is applied, at the cost of
+            possibly reduced frame rate relative to sensor output.</notes></value>
+          </enum>
+          <description>Mode of operation for the lens distortion correction block.</description>
+          <range>android.distortionCorrection.availableModes</range>
+          <details>The lens distortion correction block attempts to improve image quality by fixing
+          radial, tangential, or other geometric aberrations in the camera device's optics.  If
+          available, the android.lens.distortion field documents the lens's distortion parameters.
+
+          OFF means no distortion correction is done.
+
+          FAST/HIGH_QUALITY both mean camera device determined distortion correction will be
+          applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality
+          correction algorithms, even if it slows down capture rate. FAST means the camera device
+          will not slow down capture rate when applying correction. FAST may be the same as OFF if
+          any correction at all would slow down capture rate.  Every output stream will have a
+          similar amount of enhancement applied.
+
+          The correction only applies to processed outputs such as YUV, JPEG, or DEPTH16; it is not
+          applied to any RAW output.  Metadata coordinates such as face rectangles or metering
+          regions are also not affected by correction.
+
+          Applications enabling distortion correction need to pay extra attention when converting
+          image coordinates between corrected output buffers and the sensor array. For example, if
+          the app supports tap-to-focus and enables correction, it then has to apply the distortion
+          model described in android.lens.distortion to the image buffer tap coordinates to properly
+          calculate the tap position on the sensor active array to be used with
+          android.control.afRegions. The same applies in reverse to detected face rectangles if
+          they need to be drawn on top of the corrected output buffers.
+          </details>
+        </entry>
+      </controls>
+      <static>
+        <entry name="availableModes" type="byte" visibility="public"
+        type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3">
+          <array>
+            <size>n</size>
+          </array>
+          <description>
+          List of distortion correction modes for android.distortionCorrection.mode that are
+          supported by this camera device.
+          </description>
+          <range>Any value listed in android.distortionCorrection.mode</range>
+          <details>
+            No device is required to support this API; such devices will always list only 'OFF'.
+            All devices that support this API will list both FAST and HIGH_QUALITY.
+          </details>
+          <hal_details>
+          HAL must support both FAST and HIGH_QUALITY if distortion correction is available
+          on the camera device, but the underlying implementation can be the same for both modes.
+          That is, if the highest quality implementation on the camera device does not slow down
+          capture rate, then FAST and HIGH_QUALITY will generate the same output.
+          </hal_details>
+          <tag id="V1" />
+          <tag id="REPROC" />
+        </entry>
+      </static>
+      <dynamic>
+        <clone entry="android.distortionCorrection.mode" kind="controls" hal_version="3.3">
+        </clone>
+      </dynamic>
+    </section>
   </namespace>
 </metadata>
similarity index 94%
rename from camera/docs/metadata_properties.xsd
rename to camera/docs/metadata_definitions.xsd
index b4661d8..8e46cb1 100644 (file)
             <element name="description" type="string" maxOccurs="1"
                 minOccurs="0">
             </element>
+            <element name="deprecation_description" type="string" maxOccurs="1"
+                     minOccurs="0">
+            </element>
             <element name="units" type="string" maxOccurs="1"
                 minOccurs="0">
             </element>
             <element name="details" type="string" maxOccurs="1"
                 minOccurs="0">
             </element>
+            <element name="ndk_details" type="string" maxOccurs="1"
+                minOccurs="0">
+            </element>
             <element name="hal_details" type="string" maxOccurs="1"
                 minOccurs="0">
             </element>
                 </restriction>
             </simpleType>
         </attribute>
+        <attribute name="hal_version" type="decimal" default="3.2" />
     </complexType>
 
     <complexType name="EnumType">
 
     <complexType name="EnumValueType" mixed="true">
 
-        <sequence>
-            <element name="notes" type="string" minOccurs="0" maxOccurs="1" />
-        </sequence>
+        <all>
+            <element name="notes" type="string" minOccurs="0" maxOccurs="1"/>
+            <element name="sdk_notes" type="string" minOccurs="0" maxOccurs="1"/>
+            <element name="ndk_notes" type="string" minOccurs="0" maxOccurs="1" />
+        </all>
 
         <attribute name="deprecated" default="false">
             <simpleType>
             </simpleType>
         </attribute>
         <attribute name="id" type="string" />
+        <attribute name="hal_version" type="decimal" default="3.2" />
     </complexType>
 
     <complexType name="CloneType">
                 </restriction>
             </simpleType>
         </attribute>
+        <attribute name="hal_version" type="decimal" default="3.2" />
     </complexType>
 </schema>
index 22d636f..ed303bb 100644 (file)
@@ -33,10 +33,12 @@ from collections import OrderedDict
 IMAGE_SRC_METADATA="images/camera2/metadata/"
 
 # Prepend this path to each <img src="foo"> in javadocs
-JAVADOC_IMAGE_SRC_METADATA="../../../../" + IMAGE_SRC_METADATA
+JAVADOC_IMAGE_SRC_METADATA="/reference/" + IMAGE_SRC_METADATA
 NDKDOC_IMAGE_SRC_METADATA="../" + IMAGE_SRC_METADATA
 
 _context_buf = None
+_hal_major_version = None
+_hal_minor_version = None
 
 def _is_sec_or_ins(x):
   return isinstance(x, metadata_model.Section) or    \
@@ -174,7 +176,8 @@ def protobuf_type(entry):
     "double"                 : "double",
     "int32"                  : "int32",
     "int64"                  : "int64",
-    "enumList"               : "int32"
+    "enumList"               : "int32",
+    "string"                 : "string"
   }
 
   if typeName not in typename_to_protobuftype:
@@ -786,7 +789,7 @@ def javadoc(metadata, indent = 4):
   javadoc comment section, given a set of metadata
 
   Args:
-    metadata: A Metadata instance, representing the the top-level root
+    metadata: A Metadata instance, representing the top-level root
       of the metadata for cross-referencing
     indent: baseline level of indentation for javadoc block
   Returns:
@@ -815,13 +818,13 @@ def javadoc(metadata, indent = 4):
     "    * @see CaptureRequest#CONTROL_MODE\n"
   """
   def javadoc_formatter(text):
-    comment_prefix = " " * indent + " * ";
+    comment_prefix = " " * indent + " * "
 
     # render with markdown => HTML
     javatext = md(text, JAVADOC_IMAGE_SRC_METADATA)
 
     # Identity transform for javadoc links
-    def javadoc_link_filter(target, shortname):
+    def javadoc_link_filter(target, target_ndk, shortname):
       return '{@link %s %s}' % (target, shortname)
 
     javatext = filter_links(javatext, javadoc_link_filter)
@@ -875,7 +878,7 @@ def ndkdoc(metadata, indent = 4):
   NDK camera API C/C++ comment section, given a set of metadata
 
   Args:
-    metadata: A Metadata instance, representing the the top-level root
+    metadata: A Metadata instance, representing the top-level root
       of the metadata for cross-referencing
     indent: baseline level of indentation for comment block
   Returns:
@@ -907,6 +910,29 @@ def ndkdoc(metadata, indent = 4):
     # Turn off the table plugin since doxygen doesn't recognize generated <thead> <tbody> tags
     ndktext = md(text, NDKDOC_IMAGE_SRC_METADATA, False)
 
+    # Simple transform for ndk doc links
+    def ndkdoc_link_filter(target, target_ndk, shortname):
+      if target_ndk is not None:
+        return '{@link %s %s}' % (target_ndk, shortname)
+
+      # Create HTML link to Javadoc
+      if shortname == '':
+        lastdot = target.rfind('.')
+        if lastdot == -1:
+          shortname = target
+        else:
+          shortname = target[lastdot + 1:]
+
+      target = target.replace('.','/')
+      if target.find('#') != -1:
+        target = target.replace('#','.html#')
+      else:
+        target = target + '.html'
+
+      return '<a href="https://developer.android.com/reference/%s">%s</a>' % (target, shortname)
+
+    ndktext = filter_links(ndktext, ndkdoc_link_filter)
+
     # Convert metadata entry "android.x.y.z" to form
     # NDK tag format of "ACAMERA_X_Y_Z"
     def ndkdoc_crossref_filter(node):
@@ -945,6 +971,98 @@ def ndkdoc(metadata, indent = 4):
 
   return ndkdoc_formatter
 
+def hidldoc(metadata, indent = 4):
+  """
+  Returns a function to format a markdown syntax text block as a
+  HIDL camera HAL module C/C++ comment section, given a set of metadata
+
+  Args:
+    metadata: A Metadata instance, representing the top-level root
+      of the metadata for cross-referencing
+    indent: baseline level of indentation for comment block
+  Returns:
+    A function that transforms a String text block as follows:
+    - Indent and * for insertion into a comment block
+    - Trailing whitespace removed
+    - Entire body rendered via markdown
+    - All tag names converted to appropriate HIDL tag name for each tag
+
+  Example:
+    "This is a comment for NDK\n" +
+    "     with multiple lines, that should be   \n" +
+    "     formatted better\n" +
+    "\n" +
+    "    That covers multiple lines as well\n"
+    "    And references android.control.mode\n"
+
+    transforms to
+    "    * This is a comment for NDK\n" +
+    "    * with multiple lines, that should be\n" +
+    "    * formatted better\n" +
+    "    * That covers multiple lines as well\n" +
+    "    * and references ANDROID_CONTROL_MODE\n" +
+    "    *\n" +
+    "    * @see ANDROID_CONTROL_MODE\n"
+  """
+  def hidldoc_formatter(text):
+    # render with markdown => HTML
+    # Turn off the table plugin since doxygen doesn't recognize generated <thead> <tbody> tags
+    hidltext = md(text, NDKDOC_IMAGE_SRC_METADATA, False)
+
+    # Simple transform for hidl doc links
+    def hidldoc_link_filter(target, target_ndk, shortname):
+      if target_ndk is not None:
+        return '{@link %s %s}' % (target_ndk, shortname)
+
+      # Create HTML link to Javadoc
+      if shortname == '':
+        lastdot = target.rfind('.')
+        if lastdot == -1:
+          shortname = target
+        else:
+          shortname = target[lastdot + 1:]
+
+      target = target.replace('.','/')
+      if target.find('#') != -1:
+        target = target.replace('#','.html#')
+      else:
+        target = target + '.html'
+
+      return '<a href="https://developer.android.com/reference/%s">%s</a>' % (target, shortname)
+
+    hidltext = filter_links(hidltext, hidldoc_link_filter)
+
+    # Convert metadata entry "android.x.y.z" to form
+    # HIDL tag format of "ANDROID_X_Y_Z"
+    def hidldoc_crossref_filter(node):
+      return csym(node.name)
+
+    # For each public tag "android.x.y.z" referenced, add a
+    # "@see ANDROID_X_Y_Z"
+    def hidldoc_crossref_see_filter(node_set):
+      text = '\n'
+      for node in node_set:
+        text = text + '\n@see %s' % (csym(node.name))
+
+      return text if text != '\n' else ''
+
+    hidltext = filter_tags(hidltext, metadata, hidldoc_crossref_filter, hidldoc_crossref_see_filter)
+
+    comment_prefix = " " * indent + " * ";
+
+    def line_filter(line):
+      # Indent each line
+      # Add ' * ' to it for stylistic reasons
+      # Strip right side of trailing whitespace
+      return (comment_prefix + line).rstrip()
+
+    # Process each line with above filter
+    hidltext = "\n".join(line_filter(i) for i in hidltext.split("\n")) + "\n"
+
+    return hidltext
+
+  return hidldoc_formatter
+
 def dedent(text):
   """
   Remove all common indentation from every line but the 0th.
@@ -1163,18 +1281,19 @@ def filter_links(text, filter_function, summary_function = None):
     def name_match(name):
       return lambda node: node.name == name
 
-    tag_match = r"\{@link\s+([^\s\}]+)([^\}]*)\}"
+    tag_match = r"\{@link\s+([^\s\}\|]+)(?:\|([^\s\}]+))*([^\}]*)\}"
 
     def filter_sub(match):
       whole_match = match.group(0)
       target = match.group(1)
-      shortname = match.group(2).strip()
+      target_ndk = match.group(2)
+      shortname = match.group(3).strip()
 
-      #print "Found link '%s' as '%s' -> '%s'" % (target, shortname, filter_function(target, shortname))
+      #print "Found link '%s' ndk '%s' as '%s' -> '%s'" % (target, target_ndk, shortname, filter_function(target, target_ndk, shortname))
 
       # Replace match with crossref
       target_set.add(target)
-      return filter_function(target, shortname)
+      return filter_function(target, target_ndk, shortname)
 
     text = re.sub(tag_match, filter_sub, text)
 
@@ -1232,6 +1351,34 @@ def remove_synthetic(entries):
   """
   return (e for e in entries if not e.synthetic)
 
+def filter_added_in_hal_version(entries, hal_major_version, hal_minor_version):
+  """
+  Filter the given entries to those added in the given HIDL HAL version
+
+  Args:
+    entries: An iterable of Entry nodes
+    hal_major_version: Major HIDL version to filter for
+    hal_minor_version: Minor HIDL version to filter for
+
+  Yields:
+    An iterable of Entry nodes
+  """
+  return (e for e in entries if e.hal_major_version == hal_major_version and e.hal_minor_version == hal_minor_version)
+
+def filter_has_enum_values_added_in_hal_version(entries, hal_major_version, hal_minor_version):
+  """
+  Filter the given entries to those that have a new enum value added in the given HIDL HAL version
+
+  Args:
+    entries: An iterable of Entry nodes
+    hal_major_version: Major HIDL version to filter for
+    hal_minor_version: Minor HIDL version to filter for
+
+  Yields:
+    An iterable of Entry nodes
+  """
+  return (e for e in entries if e.has_new_values_added_in_hal_version(hal_major_version, hal_minor_version))
+
 def filter_ndk_visible(entries):
   """
   Filter the given entries by removing those that are not NDK visible.
@@ -1303,3 +1450,53 @@ def wbr(text):
       navigable_string.extract()
 
   return soup.decode()
+
+def hal_major_version():
+  return _hal_major_version
+
+def hal_minor_version():
+  return _hal_minor_version
+
+def first_hal_minor_version(hal_major_version):
+  return 2 if hal_major_version == 3 else 0
+
+def find_all_sections_added_in_hal(root, hal_major_version, hal_minor_version):
+  """
+  Find all descendants that are Section or InnerNamespace instances, which
+  were added in HIDL HAL version major.minor. The section is defined to be
+  added in a HAL version iff the lowest HAL version number of its entries is
+  that HAL version.
+
+  Args:
+    root: a Metadata instance
+    hal_major/minor_version: HAL version numbers
+
+  Returns:
+    A list of Section/InnerNamespace instances
+
+  Remarks:
+    These are known as "sections" in the generated C code.
+  """
+  all_sections = find_all_sections(root)
+  new_sections = []
+  for section in all_sections:
+    min_major_version = None
+    min_minor_version = None
+    for entry in remove_synthetic(find_unique_entries(section)):
+      min_major_version = (min_major_version or entry.hal_major_version)
+      min_minor_version = (min_minor_version or entry.hal_minor_version)
+      if entry.hal_major_version < min_major_version or \
+          (entry.hal_major_version == min_major_version and entry.hal_minor_version < min_minor_version):
+        min_minor_version = entry.hal_minor_version
+        min_major_version = entry.hal_major_version
+    if min_major_version == hal_major_version and min_minor_version == hal_minor_version:
+      new_sections.append(section)
+  return new_sections
+
+def find_first_older_used_hal_version(section, hal_major_version, hal_minor_version):
+  hal_version = (0, 0)
+  for v in section.hal_versions:
+    if (v[0] > hal_version[0] or (v[0] == hal_version[0] and v[1] > hal_version[1])) and \
+        (v[0] < hal_major_version or (v[0] == hal_major_version and v[1] < hal_minor_version)):
+      hal_version = v
+  return hal_version
index e059e33..398e43a 100644 (file)
@@ -18,7 +18,7 @@
 
 """
 A set of classes (models) each closely representing an XML node in the
-metadata_properties.xml file.
+metadata_definitions.xml file.
 
   Node: Base class for most nodes.
   Entry: A node corresponding to <entry> elements.
@@ -416,7 +416,9 @@ class Metadata(Node):
       target_kind = p.target_kind
       target_entry = self._entry_map[target_kind].get(p.name)
       p._entry = target_entry
-
+      if (p.hal_major_version == 0):
+        p._hal_major_version = target_entry._hal_major_version
+        p._hal_minor_version = target_entry._hal_minor_version
       # should not throw if we pass validation
       # but can happen when importing obsolete CSV entries
       if target_entry is None:
@@ -690,6 +692,7 @@ class Section(Node):
     kinds: A sequence of Kind children.
     merged_kinds: A sequence of virtual Kind children,
                   with each Kind's children merged by the kind.name
+    hal_versions: A set of tuples (major, minor) describing all the HAL versions entries in this section have
   """
   def __init__(self, name, parent, description=None, kinds=[]):
     self._name = name
@@ -699,7 +702,6 @@ class Section(Node):
 
     self._leafs = []
 
-
   @property
   def description(self):
     return self._description
@@ -708,6 +710,16 @@ class Section(Node):
   def kinds(self):
     return (i for i in self._kinds)
 
+  @property
+  def hal_versions(self):
+    hal_versions = set()
+    for i in self._kinds:
+      for entry in i.entries:
+        hal_versions.add( (entry.hal_major_version, entry.hal_minor_version) )
+      for namespace in i.namespaces:
+        hal_versions.update(namespace.hal_versions)
+    return hal_versions
+
   def sort_children(self):
     self.validate_tree()
     # order is always controls,static,dynamic
@@ -884,6 +896,7 @@ class InnerNamespace(Node):
     namespaces: A sequence of InnerNamespace children.
     entries: A sequence of Entry/Clone children.
     merged_entries: A sequence of MergedEntry virtual nodes from entries
+    hal_versions: A set of tuples (major, minor) describing all the HAL versions entries in this namespace have
   """
   def __init__(self, name, parent):
     self._name        = name
@@ -901,6 +914,15 @@ class InnerNamespace(Node):
     return self._entries
 
   @property
+  def hal_versions(self):
+    hal_versions = set()
+    for entry in self.entries:
+      hal_versions.add( (entry.hal_major_version, entry.hal_minor_version) )
+    for namespace in self.namespaces:
+      hal_versions.update(namespace.hal_versions)
+    return hal_versions
+
+  @property
   def merged_entries(self):
     for i in self.entries:
       yield i.merge()
@@ -963,10 +985,14 @@ class EnumValue(Node):
     hidden: A boolean, True if the enum should be hidden.
     ndk_hidden: A boolean, True if the enum should be hidden in NDK
     notes: A string describing the notes, or None.
+    sdk_notes: A string describing extra notes for public SDK only
+    ndk_notes: A string describing extra notes for public NDK only
     parent: An edge to the parent, always an Enum instance.
+    hal_major_version: The major HIDL HAL version this value was first added in
+    hal_minor_version: The minor HIDL HAL version this value was first added in
   """
   def __init__(self, name, parent,
-      id=None, deprecated=False, optional=False, hidden=False, notes=None, ndk_hidden=False):
+      id=None, deprecated=False, optional=False, hidden=False, notes=None, sdk_notes=None, ndk_notes=None, ndk_hidden=False, hal_version='3.2'):
     self._name = name                    # str, e.g. 'ON' or 'OFF'
     self._id = id                        # int, e.g. '0'
     self._deprecated = deprecated        # bool
@@ -974,7 +1000,19 @@ class EnumValue(Node):
     self._hidden = hidden                # bool
     self._ndk_hidden = ndk_hidden        # bool
     self._notes = notes                  # None or str
+    self._sdk_notes = sdk_notes          # None or str
+    self._ndk_notes = ndk_notes          # None or str
     self._parent = parent
+    if hal_version is None:
+      if parent is not None and parent.parent is not None:
+        self._hal_major_version = parent.parent.hal_major_version
+        self._hal_minor_version = parent.parent.hal_minor_version
+      else:
+        self._hal_major_version = 3
+        self._hal_minor_version = 2
+    else:
+      self._hal_major_version = int(hal_version.partition('.')[0])
+      self._hal_minor_version = int(hal_version.partition('.')[2])
 
   @property
   def id(self):
@@ -1000,6 +1038,22 @@ class EnumValue(Node):
   def notes(self):
     return self._notes
 
+  @property
+  def sdk_notes(self):
+    return self._sdk_notes
+
+  @property
+  def ndk_notes(self):
+    return self._ndk_notes
+
+  @property
+  def hal_major_version(self):
+    return self._hal_major_version
+
+  @property
+  def hal_minor_version(self):
+    return self._hal_minor_version
+
   def _get_children(self):
     return None
 
@@ -1014,15 +1068,14 @@ class Enum(Node):
         non-empty id property.
   """
   def __init__(self, parent, values, ids={}, deprecateds=[],
-      optionals=[], hiddens=[], notes={}, ndk_hiddens=[]):
+      optionals=[], hiddens=[], notes={}, sdk_notes={}, ndk_notes={}, ndk_hiddens=[], hal_versions={}):
+    self._parent = parent
+    self._name = None
     self._values =                                                             \
       [ EnumValue(val, self, ids.get(val), val in deprecateds, val in optionals, val in hiddens,  \
-                  notes.get(val), val in ndk_hiddens)                                              \
+                  notes.get(val), sdk_notes.get(val), ndk_notes.get(val), val in ndk_hiddens, hal_versions.get(val))     \
         for val in values ]
 
-    self._parent = parent
-    self._name = None
-
   @property
   def values(self):
     return (i for i in self._values)
@@ -1031,6 +1084,9 @@ class Enum(Node):
   def has_values_with_id(self):
     return bool(any(i for i in self.values if i.id))
 
+  def has_new_values_added_in_hal_version(self, hal_major_version, hal_minor_version):
+    return bool(any(i for i in self.values if i.hal_major_version == hal_major_version and i.hal_minor_version == hal_minor_version))
+
   def _get_children(self):
     return (i for i in self._values)
 
@@ -1078,6 +1134,8 @@ class Entry(Node):
     tuple_values: A sequence of strings describing the tuple values,
                   None if container is not 'tuple'.
     description: A string description, or None.
+    deprecation_description: A string describing the reason for deprecation. Must be present
+                 if deprecated is true, otherwise may be None.
     range: A string range, or None.
     units: A string units, or None.
     tags: A sequence of Tag nodes associated with this Entry.
@@ -1099,6 +1157,8 @@ class Entry(Node):
       name: A string with the fully qualified name, e.g. 'android.shading.mode'
       type: A string describing the type, e.g. 'int32'
       kind: A string describing the kind, e.g. 'static'
+      hal_version: A string for the initial HIDL HAL metadata version this entry
+                   was added in
 
     Args (if container):
       container: A string describing the container, e.g. 'array' or 'tuple'
@@ -1113,6 +1173,11 @@ class Entry(Node):
       enum_optionals: A list of optional enum values, e.g. ['OFF']
       enum_notes: A dictionary of value->notes strings.
       enum_ids: A dictionary of value->id strings.
+      enum_hal_versions: A dictionary of value->hal version strings
+
+    Args (if the 'deprecated' attribute is true):
+      deprecation_description: A string explaining the deprecation, to be added
+                               to the Java-layer @deprecated tag
 
     Args (optional):
       description: A string with a description of the entry.
@@ -1120,6 +1185,7 @@ class Entry(Node):
       units: A string with the units of the values, e.g. 'inches'
       details: A string with the detailed documentation for the entry
       hal_details: A string with the HAL implementation details for the entry
+      ndk_details: A string with the extra NDK API documentation for the entry
       tag_ids: A list of tag ID strings, e.g. ['BC', 'V1']
       type_notes: A string with the notes for the type
       visibility: A string describing the visibility, eg 'system', 'hidden',
@@ -1155,6 +1221,14 @@ class Entry(Node):
     return self._kind
 
   @property
+  def hal_major_version(self):
+    return self._hal_major_version
+
+  @property
+  def hal_minor_version(self):
+    return self._hal_minor_version
+
+  @property
   def visibility(self):
     return self._visibility
 
@@ -1180,6 +1254,10 @@ class Entry(Node):
   def deprecated(self):
     return self._deprecated
 
+  @property
+  def deprecation_description(self):
+    return self._deprecation_description
+
   # TODO: optional should just return hwlevel is None
   @property
   def optional(self):
@@ -1232,6 +1310,14 @@ class Entry(Node):
     return self._hal_details
 
   @property
+  def ndk_details(self):
+    return self._ndk_details
+
+  @property
+  def applied_ndk_details(self):
+    return (self._details or "") + (self._ndk_details or "")
+
+  @property
   def tags(self):
     if self._tags is None:
       return None
@@ -1250,6 +1336,12 @@ class Entry(Node):
   def enum(self):
     return self._enum
 
+  def has_new_values_added_in_hal_version(self, hal_major_version, hal_minor_version):
+    if self._enum is not None:
+      return self._enum.has_new_values_added_in_hal_version(hal_major_version,hal_minor_version)
+    else:
+      return False
+
   def _get_children(self):
     if self.enum:
       yield self.enum
@@ -1273,6 +1365,18 @@ class Entry(Node):
     self._container = kwargs.get('container')
     self._container_sizes = kwargs.get('container_sizes')
 
+    hal_version = kwargs.get('hal_version')
+    if hal_version is None:
+      if self.is_clone():
+        self._hal_major_version = 0
+        self._hal_minor_version = 0
+      else:
+        self._hal_major_version = 3
+        self._hal_minor_version = 2
+    else:
+      self._hal_major_version = int(hal_version.partition('.')[0])
+      self._hal_minor_version = int(hal_version.partition('.')[2])
+
     # access these via the 'enum' prop
     enum_values = kwargs.get('enum_values')
     enum_deprecateds = kwargs.get('enum_deprecateds')
@@ -1280,7 +1384,11 @@ class Entry(Node):
     enum_hiddens = kwargs.get('enum_hiddens')
     enum_ndk_hiddens = kwargs.get('enum_ndk_hiddens')
     enum_notes = kwargs.get('enum_notes')  # { value => notes }
+    enum_sdk_notes = kwargs.get('enum_sdk_notes')  # { value => sdk_notes }
+    enum_ndk_notes = kwargs.get('enum_ndk_notes')  # { value => ndk_notes }
     enum_ids = kwargs.get('enum_ids')  # { value => notes }
+    enum_hal_versions = kwargs.get('enum_hal_versions') # { value => hal_versions }
+
     self._tuple_values = kwargs.get('tuple_values')
 
     self._description = kwargs.get('description')
@@ -1288,6 +1396,7 @@ class Entry(Node):
     self._units = kwargs.get('units')
     self._details = kwargs.get('details')
     self._hal_details = kwargs.get('hal_details')
+    self._ndk_details = kwargs.get('ndk_details')
 
     self._tag_ids = kwargs.get('tag_ids', [])
     self._tags = None  # Filled in by Metadata::_construct_tags
@@ -1298,7 +1407,7 @@ class Entry(Node):
 
     if kwargs.get('enum', False):
       self._enum = Enum(self, enum_values, enum_ids, enum_deprecateds, enum_optionals,
-                        enum_hiddens, enum_notes, enum_ndk_hiddens)
+                        enum_hiddens, enum_notes, enum_sdk_notes, enum_ndk_notes, enum_ndk_hiddens, enum_hal_versions)
     else:
       self._enum = None
 
@@ -1306,6 +1415,8 @@ class Entry(Node):
     self._synthetic = kwargs.get('synthetic', False)
     self._hwlevel = kwargs.get('hwlevel')
     self._deprecated = kwargs.get('deprecated', False)
+    self._deprecation_description = kwargs.get('deprecation_description')
+
     self._optional = kwargs.get('optional')
     self._ndk_visible = kwargs.get('ndk_visible')
 
@@ -1414,6 +1525,8 @@ class Clone(Entry):
       type: A string describing the type, e.g. 'int32'
       kind: A string describing the kind, e.g. 'static'
       target_kind: A string for the kind of the target entry, e.g. 'dynamic'
+      hal_version: A string for the initial HIDL HAL metadata version this entry
+                   was added in
 
     Args (if container):
       container: A string describing the container, e.g. 'array' or 'tuple'
@@ -1436,6 +1549,7 @@ class Clone(Entry):
       units: A string with the units of the values, e.g. 'inches'
       details: A string with the detailed documentation for the entry
       hal_details: A string with the HAL implementation details for the entry
+      ndk_details: A string with the extra NDK documentation for the entry
       tag_ids: A list of tag ID strings, e.g. ['BC', 'V1']
       type_notes: A string with the notes for the type
 
@@ -1489,7 +1603,8 @@ class MergedEntry(Entry):
       entry: An Entry or Clone instance
     """
     props_distinct = ['description', 'units', 'range', 'details',
-                      'hal_details', 'tags', 'kind']
+                      'hal_details', 'ndk_details', 'tags', 'kind',
+                      'deprecation_description']
 
     for p in props_distinct:
       p = '_' + p
@@ -1509,7 +1624,9 @@ class MergedEntry(Entry):
                     'hwlevel',
                     'deprecated',
                     'optional',
-                    'typedef'
+                    'typedef',
+                    'hal_major_version',
+                    'hal_minor_version'
                    ]
 
     for p in props_common:
index eb79c9b..540fb34 100644 (file)
@@ -14,11 +14,11 @@ class TestInnerNamespace(TestCase):
     ins1 = InnerNamespace("ins1", parent=ins_outer)
     ins1a = InnerNamespace("ins1", parent=ins_outer)  # same name deliberately
     entry1 = Entry(name="entry1", type="int32", kind="static",
-                   parent=ins1)
+                   parent=ins1, hal_version="3.2")
     entry2 = Entry(name="entry2", type="int32", kind="static",
-                   parent=ins1a)
+                   parent=ins1a, hal_version="3.2")
     entry3 = Entry(name="entry3", type="int32", kind="static",
-                   parent=ins_outer)
+                   parent=ins_outer, hal_version="3.2")
 
     ins_outer._namespaces = [ins1, ins1a]
     ins_outer._entries = [entry3]
@@ -60,9 +60,9 @@ class TestKind(TestCase):
     ins1 = InnerNamespace("ins1", parent=kind_static)
     ins2 = InnerNamespace("ins2", parent=kind_dynamic)
     entry1 = Entry(name="entry1", type="int32", kind="static",
-                   parent=kind_static)
+                   parent=kind_static, hal_version="3.2")
     entry2 = Entry(name="entry2", type="int32", kind="static",
-                   parent=kind_dynamic)
+                   parent=kind_dynamic, hal_version="3.2")
 
     kind_static._namespaces = [ins1]
     kind_static._entries = [entry1]
@@ -94,11 +94,11 @@ class TestKind(TestCase):
     ins1 = InnerNamespace("ins1", parent=kind_static)
     ins1a = InnerNamespace("ins1", parent=kind_static)  # same name deliberately
     entry1 = Entry(name="entry1", type="int32", kind="static",
-                   parent=ins1)
+                   parent=ins1, hal_version="3.2")
     entry2 = Entry(name="entry2", type="int32", kind="static",
-                   parent=ins1a)
+                   parent=ins1a, hal_version="3.2")
     entry3 = Entry(name="entry3", type="int32", kind="static",
-                   parent=kind_static)
+                   parent=kind_static, hal_version="3.2")
 
     kind_static._namespaces = [ins1, ins1a]
     kind_static._entries = [entry3]
index 6759306..91af192 100755 (executable)
@@ -17,7 +17,7 @@
 #
 
 """
-A parser for metadata_properties.xml can also render the resulting model
+A parser for metadata_definitions.xml can also render the resulting model
 over a Mako template.
 
 Usage:
@@ -156,6 +156,8 @@ class MetadataParserXml:
           # no type_notes since its the same
         }
         d2 = {}
+        if 'hal_version' in entry.attrs:
+          d2['hal_version'] = entry['hal_version']
 
         insert = self.metadata.insert_clone
 
@@ -200,6 +202,10 @@ class MetadataParserXml:
     d['type_name'] = entry.get('typedef')
 
     #
+    # Initial HIDL HAL version the entry was added in
+    d['hal_version'] = entry.get('hal_version')
+
+    #
     # Enum
     #
     if entry.get('enum', 'false') == 'true':
@@ -210,7 +216,10 @@ class MetadataParserXml:
       enum_hiddens = []
       enum_ndk_hiddens = []
       enum_notes = {}
+      enum_sdk_notes = {}
+      enum_ndk_notes = {}
       enum_ids = {}
+      enum_hal_versions = {}
       for value in entry.enum.find_all('value'):
 
         value_body = self._strings_no_nl(value)
@@ -232,16 +241,30 @@ class MetadataParserXml:
         if notes is not None:
           enum_notes[value_body] = notes.string
 
+        sdk_notes = value.find('sdk_notes')
+        if sdk_notes is not None:
+          enum_sdk_notes[value_body] = sdk_notes.string
+
+        ndk_notes = value.find('ndk_notes')
+        if ndk_notes is not None:
+          enum_ndk_notes[value_body] = ndk_notes.string
+
         if value.attrs.get('id') is not None:
           enum_ids[value_body] = value['id']
 
+        if value.attrs.get('hal_version') is not None:
+          enum_hal_versions[value_body] = value['hal_version']
+
       d['enum_values'] = enum_values
       d['enum_deprecateds'] = enum_deprecateds
       d['enum_optionals'] = enum_optionals
       d['enum_hiddens'] = enum_hiddens
       d['enum_ndk_hiddens'] = enum_ndk_hiddens
       d['enum_notes'] = enum_notes
+      d['enum_sdk_notes'] = enum_sdk_notes
+      d['enum_ndk_notes'] = enum_ndk_notes
       d['enum_ids'] = enum_ids
+      d['enum_hal_versions'] = enum_hal_versions
       d['enum'] = True
 
     #
@@ -272,7 +295,8 @@ class MetadataParserXml:
   def _parse_entry_optional(self, entry):
     d = {}
 
-    optional_elements = ['description', 'range', 'units', 'details', 'hal_details']
+    optional_elements = ['description', 'range', 'units', 'details', 'hal_details', 'ndk_details',\
+                         'deprecation_description']
     for i in optional_elements:
       prop = find_child_tag(entry, i)
 
@@ -287,7 +311,7 @@ class MetadataParserXml:
 
     return d
 
-  def render(self, template, output_name=None):
+  def render(self, template, output_name=None, hal_version="3.2"):
     """
     Render the metadata model using a Mako template as the view.
 
@@ -299,9 +323,13 @@ class MetadataParserXml:
     Args:
       template: path to a Mako template file
       output_name: path to the output file, or None to use stdout
+      hal_version: target HAL version, used when generating HIDL HAL outputs.
+                   Must be a string of form "X.Y" where X and Y are integers.
     """
     buf = StringIO.StringIO()
     metadata_helpers._context_buf = buf
+    metadata_helpers._hal_major_version = int(hal_version.partition('.')[0])
+    metadata_helpers._hal_minor_version = int(hal_version.partition('.')[2])
 
     helpers = [(i, getattr(metadata_helpers, i))
                 for i in dir(metadata_helpers) if not i.startswith('_')]
@@ -328,14 +356,16 @@ class MetadataParserXml:
 if __name__ == "__main__":
   if len(sys.argv) <= 2:
     print >> sys.stderr,                                                       \
-           "Usage: %s <filename.xml> <template.mako> [<output_file>]"          \
+           "Usage: %s <filename.xml> <template.mako> [<output_file>] [<hal_version>]"          \
            % (sys.argv[0])
     sys.exit(0)
 
   file_name = sys.argv[1]
   template_name = sys.argv[2]
   output_name = sys.argv[3] if len(sys.argv) > 3 else None
+  hal_version = sys.argv[4] if len(sys.argv) > 4 else "3.2"
+
   parser = MetadataParserXml.create_from_file(file_name)
-  parser.render(template_name, output_name)
+  parser.render(template_name, output_name, hal_version)
 
   sys.exit(0)
index 3022abb..02689f0 100644 (file)
 
         <%def name="insert_entry(prop)">
         % if prop.is_clone():
-            <clone entry="${prop.name}" kind="${prop.target_kind}">
+            <clone entry="${prop.name}" kind="${prop.target_kind}"
+          % if ('hal_version' in prop._property_keys):
+                hal_version="${prop.hal_major_version}.${prop.hal_minor_version}"
+          % endif
+            >
 
               % if prop.details is not None:
                 <details>${prop.details}</details>
               % endif
 
+              % if prop.ndk_details is not None:
+                <ndk_details>${prop.ndk_details}</ndk_details>
+              % endif
+
               % if prop.hal_details is not None:
                 <hal_details>${prop.hal_details}</hal_details>
               % endif
           % if prop.hwlevel:
                 hwlevel="${prop.hwlevel}"
           % endif
+
+          % if (prop.hal_major_version, prop.hal_minor_version) != (3,2):
+                hal_version="${prop.hal_major_version}.${prop.hal_minor_version}"
+          % endif
             >
 
               % if prop.container == 'array':
                     % if value.id is not None:
                              id="${value.id}"
                     % endif
+                    % if not (value.hal_major_version == prop.hal_major_version and value.hal_minor_version == prop.hal_minor_version):
+                             hal_version=${"%d.%d" % (value.hal_major_version, value.hal_minor_version)}
+                    % endif
                       >${value.name}
                     % if value.notes is not None:
                              <notes>${value.notes}</notes>
                     % endif
+                    % if value.sdk_notes is not None:
+                             <sdk_notes>${value.sdk_notes}</sdk_notes>
+                    % endif
+                    % if value.ndk_notes is not None:
+                             <ndk_notes>${value.ndk_notes}</ndk_notes>
+                    % endif
+
                       </value>
                   % endfor
                 </enum>
                 <description>${prop.description | x}</description>
               % endif
 
+              % if prop.deprecation_description is not None:
+                <deprecation_description>${prop.deprecation_description | x}</deprecation_description>
+              % endif
+
               % if prop.units is not None:
                 <units>${prop.units | x}</units>
               % endif
                 <details>${prop.details | x}</details>
               % endif
 
+              % if prop.ndk_details is not None:
+                <ndk_details>${prop.ndk_details}</ndk_details>
+              % endif
+
               % if prop.hal_details is not None:
                 <hal_details>${prop.hal_details | x}</hal_details>
               % endif
index 8260005..ede1449 100755 (executable)
@@ -222,6 +222,29 @@ def validate_clones(soup):
       validate_error(error_msg)
       success = False
 
+    if matching_entry is not None:
+      entry_hal_major_version = 3
+      entry_hal_minor_version = 2
+      entry_hal_version = matching_entry.get('hal_version')
+      if entry_hal_version is not None:
+        entry_hal_major_version = int(entry_hal_version.partition('.')[0])
+        entry_hal_minor_version = int(entry_hal_version.partition('.')[2])
+
+      clone_hal_major_version = entry_hal_major_version
+      clone_hal_minor_version = entry_hal_minor_version
+      clone_hal_version = clone.get('hal_version')
+      if clone_hal_version is not None:
+        clone_hal_major_version = int(clone_hal_version.partition('.')[0])
+        clone_hal_minor_version = int(clone_hal_version.partition('.')[2])
+
+      if clone_hal_major_version < entry_hal_major_version or \
+          (clone_hal_major_version == entry_hal_major_version and \
+           clone_hal_minor_version < entry_hal_minor_version):
+        error_msg = ("Clone '%s' HAL version '%d.%d' is older than entry target HAL version '%d.%d'" \
+                   % (clone_name, clone_hal_major_version, clone_hal_minor_version, entry_hal_major_version, entry_hal_minor_version))
+        validate_error(error_msg)
+        success = False
+
   return success
 
 # All <entry> elements with container=$foo have a <$foo> child
@@ -281,6 +304,20 @@ def validate_entries(soup):
                                   ))
         success = False
 
+    deprecated = entry.attrs.get('deprecated')
+    if deprecated and deprecated == 'true':
+      if entry.deprecation_description is None:
+        validate_error(("Entry '%s' in kind '%s' is deprecated, but missing deprecation description") \
+                       % (fully_qualified_name(entry), find_kind(entry),
+                       ))
+        success = False
+    else:
+      if entry.deprecation_description is not None:
+        validate_error(("Entry '%s' in kind '%s' has deprecation description, but is not deprecated") \
+                       % (fully_qualified_name(entry), find_kind(entry),
+                       ))
+        success = False
+
   return success
 
 def validate_xml(xml):
index f2ba1b0..dfbe632 100644 (file)
@@ -112,7 +112,7 @@ ${entry.description | ndkdoc(metadata)}\
      * </ul></p>
      *
               % if entry.details:
-${entry.details | ndkdoc(metadata)}\
+${entry.applied_ndk_details | ndkdoc(metadata)}\
               % endif
      */
             % endif
@@ -168,6 +168,9 @@ typedef enum acamera_metadata_enum_${csym(ndk(entry.name)).lower()} {
           % if val.notes:
 ${val.notes | ndkdoc(metadata)}\
           % endif
+          % if val.ndk_notes:
+${val.ndk_notes | ndkdoc(metadata)}\
+          % endif
           % if val.deprecated:
      *
      * <b>Deprecated</b>: please refer to this API documentation to find the alternatives
diff --git a/camera/docs/ndk_metadata_properties.xml b/camera/docs/ndk_metadata_properties.xml
deleted file mode 100644 (file)
index 6f04e17..0000000
+++ /dev/null
@@ -1,9042 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2016 The Android Open Source Project
-
-     Licensed under the Apache License, Version 2.0 (the "License");
-     you may not use this file except in compliance with the License.
-     You may obtain a copy of the License at
-
-          http://www.apache.org/licenses/LICENSE-2.0
-
-     Unless required by applicable law or agreed to in writing, software
-     distributed under the License is distributed on an "AS IS" BASIS,
-     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-     See the License for the specific language governing permissions and
-     limitations under the License.
--->
-<metadata xmlns="http://schemas.android.com/service/camera/metadata/"
-xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_properties.xsd">
-
-  <tags>
-    <tag id="BC">
-        Needed for backwards compatibility with old Java API
-    </tag>
-    <tag id="V1">
-        New features for first camera 2 release (API1)
-    </tag>
-    <tag id="RAW">
-        Needed for useful RAW image processing and DNG file support
-    </tag>
-    <tag id="HAL2">
-        Entry is only used by camera device HAL 2.x
-    </tag>
-    <tag id="FULL">
-        Entry is required for full hardware level devices, and optional for other hardware levels
-    </tag>
-    <tag id="DEPTH">
-        Entry is required for the depth capability.
-    </tag>
-    <tag id="REPROC">
-        Entry is required for the YUV or PRIVATE reprocessing capability.
-    </tag>
-    <tag id="FUTURE">
-        Entry is  under-specified and is not required for now. This is for book-keeping purpose,
-        do not implement or use it, it may be revised for future.
-    </tag>
-  </tags>
-
-  <types>
-    <typedef name="pairFloatFloat">
-      <language name="java">android.util.Pair&lt;Float,Float&gt;</language>
-    </typedef>
-    <typedef name="pairDoubleDouble">
-      <language name="java">android.util.Pair&lt;Double,Double&gt;</language>
-    </typedef>
-    <typedef name="rectangle">
-      <language name="java">android.graphics.Rect</language>
-    </typedef>
-    <typedef name="size">
-      <language name="java">android.util.Size</language>
-    </typedef>
-    <typedef name="string">
-      <language name="java">String</language>
-    </typedef>
-    <typedef name="boolean">
-      <language name="java">boolean</language>
-    </typedef>
-    <typedef name="imageFormat">
-      <language name="java">int</language>
-    </typedef>
-    <typedef name="streamConfigurationMap">
-      <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language>
-    </typedef>
-    <typedef name="streamConfiguration">
-      <language name="java">android.hardware.camera2.params.StreamConfiguration</language>
-    </typedef>
-    <typedef name="streamConfigurationDuration">
-      <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language>
-    </typedef>
-    <typedef name="face">
-      <language name="java">android.hardware.camera2.params.Face</language>
-    </typedef>
-    <typedef name="meteringRectangle">
-      <language name="java">android.hardware.camera2.params.MeteringRectangle</language>
-    </typedef>
-    <typedef name="rangeFloat">
-      <language name="java">android.util.Range&lt;Float&gt;</language>
-    </typedef>
-    <typedef name="rangeInt">
-      <language name="java">android.util.Range&lt;Integer&gt;</language>
-    </typedef>
-    <typedef name="rangeLong">
-      <language name="java">android.util.Range&lt;Long&gt;</language>
-    </typedef>
-    <typedef name="colorSpaceTransform">
-      <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language>
-    </typedef>
-    <typedef name="rggbChannelVector">
-      <language name="java">android.hardware.camera2.params.RggbChannelVector</language>
-    </typedef>
-    <typedef name="blackLevelPattern">
-      <language name="java">android.hardware.camera2.params.BlackLevelPattern</language>
-    </typedef>
-    <typedef name="enumList">
-      <language name="java">int</language>
-    </typedef>
-    <typedef name="sizeF">
-      <language name="java">android.util.SizeF</language>
-    </typedef>
-    <typedef name="point">
-      <language name="java">android.graphics.Point</language>
-    </typedef>
-    <typedef name="tonemapCurve">
-      <language name="java">android.hardware.camera2.params.TonemapCurve</language>
-    </typedef>
-    <typedef name="lensShadingMap">
-      <language name="java">android.hardware.camera2.params.LensShadingMap</language>
-    </typedef>
-    <typedef name="location">
-      <language name="java">android.location.Location</language>
-    </typedef>
-    <typedef name="highSpeedVideoConfiguration">
-      <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language>
-    </typedef>
-    <typedef name="reprocessFormatsMap">
-      <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language>
-    </typedef>
-  </types>
-
-  <namespace name="android">
-    <section name="colorCorrection">
-      <controls>
-        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
-          <enum>
-            <value>TRANSFORM_MATRIX
-              <notes>Use the android.colorCorrection.transform matrix
-                and android.colorCorrection.gains to do color conversion.
-
-                All advanced white balance adjustments (not specified
-                by our white balance pipeline) must be disabled.
-
-                If AWB is enabled with `android.control.awbMode != OFF`, then
-                TRANSFORM_MATRIX is ignored. The camera device will override
-                this value to either FAST or HIGH_QUALITY.
-              </notes>
-            </value>
-            <value>FAST
-              <notes>Color correction processing must not slow down
-              capture rate relative to sensor raw output.
-
-              Advanced white balance adjustments above and beyond
-              the specified white balance pipeline may be applied.
-
-              If AWB is enabled with `android.control.awbMode != OFF`, then
-              the camera device uses the last frame's AWB values
-              (or defaults if AWB has never been run).
-            </notes>
-            </value>
-            <value>HIGH_QUALITY
-              <notes>Color correction processing operates at improved
-              quality but the capture rate might be reduced (relative to sensor
-              raw output rate)
-
-              Advanced white balance adjustments above and beyond
-              the specified white balance pipeline may be applied.
-
-              If AWB is enabled with `android.control.awbMode != OFF`, then
-              the camera device uses the last frame's AWB values
-              (or defaults if AWB has never been run).
-            </notes>
-            </value>
-          </enum>
-
-          <description>
-          The mode control selects how the image data is converted from the
-          sensor's native color into linear sRGB color.
-          </description>
-          <details>
-          When auto-white balance (AWB) is enabled with android.control.awbMode, this
-          control is overridden by the AWB routine. When AWB is disabled, the
-          application controls how the color mapping is performed.
-
-          We define the expected processing pipeline below. For consistency
-          across devices, this is always the case with TRANSFORM_MATRIX.
-
-          When either FULL or HIGH_QUALITY is used, the camera device may
-          do additional processing but android.colorCorrection.gains and
-          android.colorCorrection.transform will still be provided by the
-          camera device (in the results) and be roughly correct.
-
-          Switching to TRANSFORM_MATRIX and using the data provided from
-          FAST or HIGH_QUALITY will yield a picture with the same white point
-          as what was produced by the camera device in the earlier frame.
-
-          The expected processing pipeline is as follows:
-
-          ![White balance processing pipeline](android.colorCorrection.mode/processing_pipeline.png)
-
-          The white balance is encoded by two values, a 4-channel white-balance
-          gain vector (applied in the Bayer domain), and a 3x3 color transform
-          matrix (applied after demosaic).
-
-          The 4-channel white-balance gains are defined as:
-
-              android.colorCorrection.gains = [ R G_even G_odd B ]
-
-          where `G_even` is the gain for green pixels on even rows of the
-          output, and `G_odd` is the gain for green pixels on the odd rows.
-          These may be identical for a given camera device implementation; if
-          the camera device does not support a separate gain for even/odd green
-          channels, it will use the `G_even` value, and write `G_odd` equal to
-          `G_even` in the output result metadata.
-
-          The matrices for color transforms are defined as a 9-entry vector:
-
-              android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
-
-          which define a transform from input sensor colors, `P_in = [ r g b ]`,
-          to output linear sRGB, `P_out = [ r' g' b' ]`,
-
-          with colors as follows:
-
-              r' = I0r + I1g + I2b
-              g' = I3r + I4g + I5b
-              b' = I6r + I7g + I8b
-
-          Both the input and output value ranges must match. Overflow/underflow
-          values are clipped to fit within the range.
-          </details>
-          <hal_details>
-          HAL must support both FAST and HIGH_QUALITY if color correction control is available
-          on the camera device, but the underlying implementation can be the same for both modes.
-          That is, if the highest quality implementation on the camera device does not slow down
-          capture rate, then FAST and HIGH_QUALITY should generate the same output.
-          </hal_details>
-        </entry>
-        <entry name="transform" type="rational" visibility="public"
-               type_notes="3x3 rational matrix in row-major order"
-               container="array" typedef="colorSpaceTransform" hwlevel="full">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>A color transform matrix to use to transform
-          from sensor RGB color space to output linear sRGB color space.
-          </description>
-          <units>Unitless scale factors</units>
-          <details>This matrix is either set by the camera device when the request
-          android.colorCorrection.mode is not TRANSFORM_MATRIX, or
-          directly by the application in the request when the
-          android.colorCorrection.mode is TRANSFORM_MATRIX.
-
-          In the latter case, the camera device may round the matrix to account
-          for precision issues; the final rounded matrix should be reported back
-          in this matrix result metadata. The transform should keep the magnitude
-          of the output color values within `[0, 1.0]` (assuming input color
-          values is within the normalized range `[0, 1.0]`), or clipping may occur.
-
-          The valid range of each matrix element varies on different devices, but
-          values within [-1.5, 3.0] are guaranteed not to be clipped.
-          </details>
-        </entry>
-        <entry name="gains" type="float" visibility="public"
-               type_notes="A 1D array of floats for 4 color channel gains"
-               container="array" typedef="rggbChannelVector" hwlevel="full">
-          <array>
-            <size>4</size>
-          </array>
-          <description>Gains applying to Bayer raw color channels for
-          white-balance.</description>
-          <units>Unitless gain factors</units>
-          <details>
-          These per-channel gains are either set by the camera device
-          when the request android.colorCorrection.mode is not
-          TRANSFORM_MATRIX, or directly by the application in the
-          request when the android.colorCorrection.mode is
-          TRANSFORM_MATRIX.
-
-          The gains in the result metadata are the gains actually
-          applied by the camera device to the current frame.
-
-          The valid range of gains varies on different devices, but gains
-          between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
-          device allows gains below 1.0, this is usually not recommended because
-          this can create color artifacts.
-          </details>
-          <hal_details>
-          The 4-channel white-balance gains are defined in
-          the order of `[R G_even G_odd B]`, where `G_even` is the gain
-          for green pixels on even rows of the output, and `G_odd`
-          is the gain for green pixels on the odd rows.
-
-          If a HAL does not support a separate gain for even/odd green
-          channels, it must use the `G_even` value, and write
-          `G_odd` equal to `G_even` in the output result metadata.
-          </hal_details>
-        </entry>
-        <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
-          <enum>
-            <value>OFF
-              <notes>
-                No aberration correction is applied.
-              </notes>
-            </value>
-            <value>FAST
-              <notes>
-                Aberration correction will not slow down capture rate
-                relative to sensor raw output.
-            </notes>
-            </value>
-            <value>HIGH_QUALITY
-              <notes>
-                Aberration correction operates at improved quality but the capture rate might be
-                reduced (relative to sensor raw output rate)
-            </notes>
-            </value>
-          </enum>
-          <description>
-            Mode of operation for the chromatic aberration correction algorithm.
-          </description>
-          <range>android.colorCorrection.availableAberrationModes</range>
-          <details>
-            Chromatic (color) aberration is caused by the fact that different wavelengths of light
-            can not focus on the same point after exiting from the lens. This metadata defines
-            the high level control of chromatic aberration correction algorithm, which aims to
-            minimize the chromatic artifacts that may occur along the object boundaries in an
-            image.
-
-            FAST/HIGH_QUALITY both mean that camera device determined aberration
-            correction will be applied. HIGH_QUALITY mode indicates that the camera device will
-            use the highest-quality aberration correction algorithms, even if it slows down
-            capture rate. FAST means the camera device will not slow down capture rate when
-            applying aberration correction.
-
-            LEGACY devices will always be in FAST mode.
-          </details>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.colorCorrection.mode" kind="controls">
-        </clone>
-        <clone entry="android.colorCorrection.transform" kind="controls">
-        </clone>
-        <clone entry="android.colorCorrection.gains" kind="controls">
-        </clone>
-        <clone entry="android.colorCorrection.aberrationMode" kind="controls">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="availableAberrationModes" type="byte" visibility="public"
-        type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-            List of aberration correction modes for android.colorCorrection.aberrationMode that are
-            supported by this camera device.
-          </description>
-          <range>Any value listed in android.colorCorrection.aberrationMode</range>
-          <details>
-            This key lists the valid modes for android.colorCorrection.aberrationMode.  If no
-            aberration correction modes are available for a device, this list will solely include
-            OFF mode. All camera devices will support either OFF or FAST mode.
-
-            Camera devices that support the MANUAL_POST_PROCESSING capability will always list
-            OFF mode. This includes all FULL level devices.
-
-            LEGACY devices will always only support FAST mode.
-          </details>
-          <hal_details>
-            HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available
-            on the camera device, but the underlying implementation can be the same for both modes.
-            That is, if the highest quality implementation on the camera device does not slow down
-            capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-      </static>
-    </section>
-    <section name="control">
-      <controls>
-        <entry name="aeAntibandingMode" type="byte" visibility="public"
-               enum="true" hwlevel="legacy">
-          <enum>
-            <value>OFF
-              <notes>
-                The camera device will not adjust exposure duration to
-                avoid banding problems.
-              </notes>
-            </value>
-            <value>50HZ
-              <notes>
-                The camera device will adjust exposure duration to
-                avoid banding problems with 50Hz illumination sources.
-              </notes>
-            </value>
-            <value>60HZ
-              <notes>
-                The camera device will adjust exposure duration to
-                avoid banding problems with 60Hz illumination
-                sources.
-              </notes>
-            </value>
-            <value>AUTO
-              <notes>
-                The camera device will automatically adapt its
-                antibanding routine to the current illumination
-                condition. This is the default mode if AUTO is
-                available on given camera device.
-              </notes>
-            </value>
-          </enum>
-          <description>
-            The desired setting for the camera device's auto-exposure
-            algorithm's antibanding compensation.
-          </description>
-          <range>
-            android.control.aeAvailableAntibandingModes
-          </range>
-          <details>
-            Some kinds of lighting fixtures, such as some fluorescent
-            lights, flicker at the rate of the power supply frequency
-            (60Hz or 50Hz, depending on country). While this is
-            typically not noticeable to a person, it can be visible to
-            a camera device. If a camera sets its exposure time to the
-            wrong value, the flicker may become visible in the
-            viewfinder as flicker or in a final captured image, as a
-            set of variable-brightness bands across the image.
-
-            Therefore, the auto-exposure routines of camera devices
-            include antibanding routines that ensure that the chosen
-            exposure value will not cause such banding. The choice of
-            exposure time depends on the rate of flicker, which the
-            camera device can detect automatically, or the expected
-            rate can be selected by the application using this
-            control.
-
-            A given camera device may not support all of the possible
-            options for the antibanding mode. The
-            android.control.aeAvailableAntibandingModes key contains
-            the available modes for a given camera device.
-
-            AUTO mode is the default if it is available on given
-            camera device. When AUTO mode is not available, the
-            default will be either 50HZ or 60HZ, and both 50HZ
-            and 60HZ will be available.
-
-            If manual exposure control is enabled (by setting
-            android.control.aeMode or android.control.mode to OFF),
-            then this setting has no effect, and the application must
-            ensure it selects exposure times that do not cause banding
-            issues. The android.statistics.sceneFlicker key can assist
-            the application in this.
-          </details>
-          <hal_details>
-            For all capture request templates, this field must be set
-            to AUTO if AUTO mode is available. If AUTO is not available,
-            the default must be either 50HZ or 60HZ, and both 50HZ and
-            60HZ must be available.
-
-            If manual exposure control is enabled (by setting
-            android.control.aeMode or android.control.mode to OFF),
-            then the exposure values provided by the application must not be
-            adjusted for antibanding.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy">
-          <description>Adjustment to auto-exposure (AE) target image
-          brightness.</description>
-          <units>Compensation steps</units>
-          <range>android.control.aeCompensationRange</range>
-          <details>
-          The adjustment is measured as a count of steps, with the
-          step size defined by android.control.aeCompensationStep and the
-          allowed range by android.control.aeCompensationRange.
-
-          For example, if the exposure value (EV) step is 0.333, '6'
-          will mean an exposure compensation of +2 EV; -3 will mean an
-          exposure compensation of -1 EV. One EV represents a doubling
-          of image brightness. Note that this control will only be
-          effective if android.control.aeMode `!=` OFF. This control
-          will take effect even when android.control.aeLock `== true`.
-
-          In the event of exposure compensation value being changed, camera device
-          may take several frames to reach the newly requested exposure target.
-          During that time, android.control.aeState field will be in the SEARCHING
-          state. Once the new exposure target is reached, android.control.aeState will
-          change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
-          FLASH_REQUIRED (if the scene is too dark for still capture).
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeLock" type="byte" visibility="public" enum="true"
-               typedef="boolean" hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>Auto-exposure lock is disabled; the AE algorithm
-            is free to update its parameters.</notes></value>
-            <value>ON
-            <notes>Auto-exposure lock is enabled; the AE algorithm
-            must not update the exposure and sensitivity parameters
-            while the lock is active.
-
-            android.control.aeExposureCompensation setting changes
-            will still take effect while auto-exposure is locked.
-
-            Some rare LEGACY devices may not support
-            this, in which case the value will always be overridden to OFF.
-            </notes></value>
-          </enum>
-          <description>Whether auto-exposure (AE) is currently locked to its latest
-          calculated values.</description>
-          <details>
-          When set to `true` (ON), the AE algorithm is locked to its latest parameters,
-          and will not change exposure settings until the lock is set to `false` (OFF).
-
-          Note that even when AE is locked, the flash may be fired if
-          the android.control.aeMode is ON_AUTO_FLASH /
-          ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.
-
-          When android.control.aeExposureCompensation is changed, even if the AE lock
-          is ON, the camera device will still adjust its exposure value.
-
-          If AE precapture is triggered (see android.control.aePrecaptureTrigger)
-          when AE is already locked, the camera device will not change the exposure time
-          (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity)
-          parameters. The flash may be fired if the android.control.aeMode
-          is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
-          android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed.
-          Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.
-
-          When an AE precapture sequence is triggered, AE unlock will not be able to unlock
-          the AE if AE is locked by the camera device internally during precapture metering
-          sequence. In other words, submitting requests with AE unlock has no effect for an
-          ongoing precapture metering sequence. Otherwise, the precapture metering sequence
-          will never succeed in a sequence of preview requests where AE lock is always set
-          to `false`.
-
-          Since the camera device has a pipeline of in-flight requests, the settings that
-          get locked do not necessarily correspond to the settings that were present in the
-          latest capture result received from the camera device, since additional captures
-          and AE updates may have occurred even before the result was sent out. If an
-          application is switching between automatic and manual control and wishes to eliminate
-          any flicker during the switch, the following procedure is recommended:
-
-            1. Starting in auto-AE mode:
-            2. Lock AE
-            3. Wait for the first result to be output that has the AE locked
-            4. Copy exposure settings from that result into a request, set the request to manual AE
-            5. Submit the capture request, proceed to run manual AE as desired.
-
-          See android.control.aeState for AE lock related state transition details.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy">
-          <enum>
-            <value>OFF
-              <notes>
-                The camera device's autoexposure routine is disabled.
-
-                The application-selected android.sensor.exposureTime,
-                android.sensor.sensitivity and
-                android.sensor.frameDuration are used by the camera
-                device, along with android.flash.* fields, if there's
-                a flash unit for this camera device.
-
-                Note that auto-white balance (AWB) and auto-focus (AF)
-                behavior is device dependent when AE is in OFF mode.
-                To have consistent behavior across different devices,
-                it is recommended to either set AWB and AF to OFF mode
-                or lock AWB and AF before setting AE to OFF.
-                See android.control.awbMode, android.control.afMode,
-                android.control.awbLock, and android.control.afTrigger
-                for more details.
-
-                LEGACY devices do not support the OFF mode and will
-                override attempts to use this value to ON.
-              </notes>
-            </value>
-            <value>ON
-              <notes>
-                The camera device's autoexposure routine is active,
-                with no flash control.
-
-                The application's values for
-                android.sensor.exposureTime,
-                android.sensor.sensitivity, and
-                android.sensor.frameDuration are ignored. The
-                application has control over the various
-                android.flash.* fields.
-              </notes>
-            </value>
-            <value>ON_AUTO_FLASH
-              <notes>
-                Like ON, except that the camera device also controls
-                the camera's flash unit, firing it in low-light
-                conditions.
-
-                The flash may be fired during a precapture sequence
-                (triggered by android.control.aePrecaptureTrigger) and
-                may be fired for captures for which the
-                android.control.captureIntent field is set to
-                STILL_CAPTURE
-              </notes>
-            </value>
-            <value>ON_ALWAYS_FLASH
-              <notes>
-                Like ON, except that the camera device also controls
-                the camera's flash unit, always firing it for still
-                captures.
-
-                The flash may be fired during a precapture sequence
-                (triggered by android.control.aePrecaptureTrigger) and
-                will always be fired for captures for which the
-                android.control.captureIntent field is set to
-                STILL_CAPTURE
-              </notes>
-            </value>
-            <value>ON_AUTO_FLASH_REDEYE
-              <notes>
-                Like ON_AUTO_FLASH, but with automatic red eye
-                reduction.
-
-                If deemed necessary by the camera device, a red eye
-                reduction flash will fire during the precapture
-                sequence.
-              </notes>
-            </value>
-          </enum>
-          <description>The desired mode for the camera device's
-          auto-exposure routine.</description>
-          <range>android.control.aeAvailableModes</range>
-          <details>
-            This control is only effective if android.control.mode is
-            AUTO.
-
-            When set to any of the ON modes, the camera device's
-            auto-exposure routine is enabled, overriding the
-            application's selected exposure time, sensor sensitivity,
-            and frame duration (android.sensor.exposureTime,
-            android.sensor.sensitivity, and
-            android.sensor.frameDuration). If one of the FLASH modes
-            is selected, the camera device's flash unit controls are
-            also overridden.
-
-            The FLASH modes are only available if the camera device
-            has a flash unit (android.flash.info.available is `true`).
-
-            If flash TORCH mode is desired, this field must be set to
-            ON or OFF, and android.flash.mode set to TORCH.
-
-            When set to any of the ON modes, the values chosen by the
-            camera device auto-exposure routine for the overridden
-            fields for a given capture will be available in its
-            CaptureResult.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeRegions" type="int32" visibility="public"
-            optional="true" container="array" typedef="meteringRectangle">
-          <array>
-            <size>5</size>
-            <size>area_count</size>
-          </array>
-          <description>List of metering areas to use for auto-exposure adjustment.</description>
-          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
-          <range>Coordinates must be between `[(0,0), (width, height))` of
-          android.sensor.info.activeArraySize</range>
-          <details>
-              Not available if android.control.maxRegionsAe is 0.
-              Otherwise will always be present.
-
-              The maximum number of regions supported by the device is determined by the value
-              of android.control.maxRegionsAe.
-
-              The data representation is int[5 * area_count].
-              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-              ymax.
-
-              The coordinate system is based on the active pixel array,
-              with (0,0) being the top-left pixel in the active pixel array, and
-              (android.sensor.info.activeArraySize.width - 1,
-              android.sensor.info.activeArraySize.height - 1) being the
-              bottom-right pixel in the active pixel array.
-
-              The weight must be within `[0, 1000]`, and represents a weight
-              for every pixel in the area. This means that a large metering area
-              with the same weight as a smaller area will have more effect in
-              the metering result. Metering areas can partially overlap and the
-              camera device will add the weights in the overlap region.
-
-              The weights are relative to weights of other exposure metering regions, so if only one
-              region is used, all non-zero weights will have the same effect. A region with 0
-              weight is ignored.
-
-              If all regions have 0 weight, then no specific metering area needs to be used by the
-              camera device.
-
-              If the metering region is outside the used android.scaler.cropRegion returned in
-              capture result metadata, the camera device will ignore the sections outside the crop
-              region and output only the intersection rectangle as the metering region in the result
-              metadata.  If the region is entirely outside the crop region, it will be ignored and
-              not reported in the result metadata.
-          </details>
-          <hal_details>
-              The HAL level representation of MeteringRectangle[] is a
-              int[5 * area_count].
-              Every five elements represent a metering region of
-              (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but
-              exclusive on xmax and ymax.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeTargetFpsRange" type="int32" visibility="public"
-               container="array" typedef="rangeInt" hwlevel="legacy">
-          <array>
-            <size>2</size>
-          </array>
-          <description>Range over which the auto-exposure routine can
-          adjust the capture frame rate to maintain good
-          exposure.</description>
-          <units>Frames per second (FPS)</units>
-          <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range>
-          <details>Only constrains auto-exposure (AE) algorithm, not
-          manual control of android.sensor.exposureTime and
-          android.sensor.frameDuration.</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aePrecaptureTrigger" type="byte" visibility="public"
-               enum="true" hwlevel="limited">
-          <enum>
-            <value>IDLE
-              <notes>The trigger is idle.</notes>
-            </value>
-            <value>START
-              <notes>The precapture metering sequence will be started
-              by the camera device.
-
-              The exact effect of the precapture trigger depends on
-              the current AE mode and state.</notes>
-            </value>
-            <value>CANCEL
-              <notes>The camera device will cancel any currently active or completed
-              precapture metering sequence, the auto-exposure routine will return to its
-              initial state.</notes>
-            </value>
-          </enum>
-          <description>Whether the camera device will trigger a precapture
-          metering sequence when it processes this request.</description>
-          <details>This entry is normally set to IDLE, or is not
-          included at all in the request settings. When included and
-          set to START, the camera device will trigger the auto-exposure (AE)
-          precapture metering sequence.
-
-          When set to CANCEL, the camera device will cancel any active
-          precapture metering trigger, and return to its initial AE state.
-          If a precapture metering sequence is already completed, and the camera
-          device has implicitly locked the AE for subsequent still capture, the
-          CANCEL trigger will unlock the AE and return to its initial AE state.
-
-          The precapture sequence should be triggered before starting a
-          high-quality still capture for final metering decisions to
-          be made, and for firing pre-capture flash pulses to estimate
-          scene brightness and required final capture flash power, when
-          the flash is enabled.
-
-          Normally, this entry should be set to START for only a
-          single request, and the application should wait until the
-          sequence completes before starting a new one.
-
-          When a precapture metering sequence is finished, the camera device
-          may lock the auto-exposure routine internally to be able to accurately expose the
-          subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`).
-          For this case, the AE may not resume normal scan if no subsequent still capture is
-          submitted. To ensure that the AE routine restarts normal scan, the application should
-          submit a request with `android.control.aeLock == true`, followed by a request
-          with `android.control.aeLock == false`, if the application decides not to submit a
-          still capture request after the precapture sequence completes. Alternatively, for
-          API level 23 or newer devices, the CANCEL can be used to unlock the camera device
-          internally locked AE if the application doesn't submit a still capture request after
-          the AE precapture trigger. Note that, the CANCEL was added in API level 23, and must not
-          be used in devices that have earlier API levels.
-
-          The exact effect of auto-exposure (AE) precapture trigger
-          depends on the current AE mode and state; see
-          android.control.aeState for AE precapture state transition
-          details.
-
-          On LEGACY-level devices, the precapture trigger is not supported;
-          capturing a high-resolution JPEG image will automatically trigger a
-          precapture sequence before the high-resolution capture, including
-          potentially firing a pre-capture flash.
-
-          Using the precapture trigger and the auto-focus trigger android.control.afTrigger
-          simultaneously is allowed. However, since these triggers often require cooperation between
-          the auto-focus and auto-exposure routines (for example, they may need to be enabled for a
-          focus sweep), the camera device may delay acting on a later trigger until the previous
-          trigger has been fully handled. This may lead to longer intervals between the trigger and
-          changes to android.control.aeState indicating the start of the precapture sequence, for
-          example.
-
-          If both the precapture and the auto-focus trigger are activated on the same request, then
-          the camera device will complete them in the optimal order for that device.
-          </details>
-          <hal_details>
-          The HAL must support triggering the AE precapture trigger while an AF trigger is active
-          (and vice versa), or at the same time as the AF trigger.  It is acceptable for the HAL to
-          treat these as two consecutive triggers, for example handling the AF trigger and then the
-          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
-          to minimize the latency for converging both focus and exposure/flash usage.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="afMode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>The auto-focus routine does not control the lens;
-            android.lens.focusDistance is controlled by the
-            application.</notes></value>
-            <value>AUTO
-            <notes>Basic automatic focus mode.
-
-            In this mode, the lens does not move unless
-            the autofocus trigger action is called. When that trigger
-            is activated, AF will transition to ACTIVE_SCAN, then to
-            the outcome of the scan (FOCUSED or NOT_FOCUSED).
-
-            Always supported if lens is not fixed focus.
-
-            Use android.lens.info.minimumFocusDistance to determine if lens
-            is fixed-focus.
-
-            Triggering AF_CANCEL resets the lens position to default,
-            and sets the AF state to INACTIVE.</notes></value>
-            <value>MACRO
-            <notes>Close-up focusing mode.
-
-            In this mode, the lens does not move unless the
-            autofocus trigger action is called. When that trigger is
-            activated, AF will transition to ACTIVE_SCAN, then to
-            the outcome of the scan (FOCUSED or NOT_FOCUSED). This
-            mode is optimized for focusing on objects very close to
-            the camera.
-
-            When that trigger is activated, AF will transition to
-            ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or
-            NOT_FOCUSED). Triggering cancel AF resets the lens
-            position to default, and sets the AF state to
-            INACTIVE.</notes></value>
-            <value>CONTINUOUS_VIDEO
-            <notes>In this mode, the AF algorithm modifies the lens
-            position continually to attempt to provide a
-            constantly-in-focus image stream.
-
-            The focusing behavior should be suitable for good quality
-            video recording; typically this means slower focus
-            movement and no overshoots. When the AF trigger is not
-            involved, the AF algorithm should start in INACTIVE state,
-            and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
-            states as appropriate. When the AF trigger is activated,
-            the algorithm should immediately transition into
-            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
-            lens position until a cancel AF trigger is received.
-
-            Once cancel is received, the algorithm should transition
-            back to INACTIVE and resume passive scan. Note that this
-            behavior is not identical to CONTINUOUS_PICTURE, since an
-            ongoing PASSIVE_SCAN must immediately be
-            canceled.</notes></value>
-            <value>CONTINUOUS_PICTURE
-            <notes>In this mode, the AF algorithm modifies the lens
-            position continually to attempt to provide a
-            constantly-in-focus image stream.
-
-            The focusing behavior should be suitable for still image
-            capture; typically this means focusing as fast as
-            possible. When the AF trigger is not involved, the AF
-            algorithm should start in INACTIVE state, and then
-            transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
-            appropriate as it attempts to maintain focus. When the AF
-            trigger is activated, the algorithm should finish its
-            PASSIVE_SCAN if active, and then transition into
-            AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
-            lens position until a cancel AF trigger is received.
-
-            When the AF cancel trigger is activated, the algorithm
-            should transition back to INACTIVE and then act as if it
-            has just been started.</notes></value>
-            <value>EDOF
-            <notes>Extended depth of field (digital focus) mode.
-
-            The camera device will produce images with an extended
-            depth of field automatically; no special focusing
-            operations need to be done before taking a picture.
-
-            AF triggers are ignored, and the AF state will always be
-            INACTIVE.</notes></value>
-          </enum>
-          <description>Whether auto-focus (AF) is currently enabled, and what
-          mode it is set to.</description>
-          <range>android.control.afAvailableModes</range>
-          <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus
-          (i.e. `android.lens.info.minimumFocusDistance &gt; 0`). Also note that
-          when android.control.aeMode is OFF, the behavior of AF is device
-          dependent. It is recommended to lock AF by using android.control.afTrigger before
-          setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF.
-
-          If the lens is controlled by the camera device auto-focus algorithm,
-          the camera device will report the current AF status in android.control.afState
-          in result metadata.</details>
-          <hal_details>
-          When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a
-          request (android.control.afTrigger `==` START). After an AF trigger, the afState will end
-          up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see
-          android.control.afState for detailed state transitions), which indicates that the lens is
-          locked and will not move. If camera movement (e.g. tilting camera) causes the lens to move
-          after the lens is locked, the HAL must compensate this movement appropriately such that
-          the same focal plane remains in focus.
-
-          When afMode is one of the continuous auto focus modes, the HAL is free to start a AF
-          scan whenever it's not locked. When the lens is locked after an AF trigger
-          (see android.control.afState for detailed state transitions), the HAL should maintain the
-          same lock behavior as above.
-
-          When afMode is OFF, the application controls focus manually. The accuracy of the
-          focus distance control depends on the android.lens.info.focusDistanceCalibration.
-          However, the lens must not move regardless of the camera movement for any focus distance
-          manual control.
-
-          To put this in concrete terms, if the camera has lens elements which may move based on
-          camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to
-          remain in a fixed position invariant to the camera's orientation or motion, for example,
-          by using accelerometer measurements in the lens control logic. This is a typical issue
-          that will arise on camera modules with open-loop VCMs.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="afRegions" type="int32" visibility="public"
-               optional="true" container="array" typedef="meteringRectangle">
-          <array>
-            <size>5</size>
-            <size>area_count</size>
-          </array>
-          <description>List of metering areas to use for auto-focus.</description>
-          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
-          <range>Coordinates must be between `[(0,0), (width, height))` of
-          android.sensor.info.activeArraySize</range>
-          <details>
-              Not available if android.control.maxRegionsAf is 0.
-              Otherwise will always be present.
-
-              The maximum number of focus areas supported by the device is determined by the value
-              of android.control.maxRegionsAf.
-
-              The data representation is int[5 * area_count].
-              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-              ymax.
-
-              The coordinate system is based on the active pixel array,
-              with (0,0) being the top-left pixel in the active pixel array, and
-              (android.sensor.info.activeArraySize.width - 1,
-              android.sensor.info.activeArraySize.height - 1) being the
-              bottom-right pixel in the active pixel array.
-
-              The weight must be within `[0, 1000]`, and represents a weight
-              for every pixel in the area. This means that a large metering area
-              with the same weight as a smaller area will have more effect in
-              the metering result. Metering areas can partially overlap and the
-              camera device will add the weights in the overlap region.
-
-              The weights are relative to weights of other metering regions, so if only one region
-              is used, all non-zero weights will have the same effect. A region with 0 weight is
-              ignored.
-
-              If all regions have 0 weight, then no specific metering area needs to be used by the
-              camera device.
-
-              If the metering region is outside the used android.scaler.cropRegion returned in
-              capture result metadata, the camera device will ignore the sections outside the crop
-              region and output only the intersection rectangle as the metering region in the result
-              metadata. If the region is entirely outside the crop region, it will be ignored and
-              not reported in the result metadata.
-          </details>
-          <hal_details>
-              The HAL level representation of MeteringRectangle[] is a
-              int[5 * area_count].
-              Every five elements represent a metering region of
-              (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but
-              exclusive on xmax and ymax.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="afTrigger" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>IDLE
-              <notes>The trigger is idle.</notes>
-            </value>
-            <value>START
-              <notes>Autofocus will trigger now.</notes>
-            </value>
-            <value>CANCEL
-              <notes>Autofocus will return to its initial
-              state, and cancel any currently active trigger.</notes>
-            </value>
-          </enum>
-          <description>
-          Whether the camera device will trigger autofocus for this request.
-          </description>
-          <details>This entry is normally set to IDLE, or is not
-          included at all in the request settings.
-
-          When included and set to START, the camera device will trigger the
-          autofocus algorithm. If autofocus is disabled, this trigger has no effect.
-
-          When set to CANCEL, the camera device will cancel any active trigger,
-          and return to its initial AF state.
-
-          Generally, applications should set this entry to START or CANCEL for only a
-          single capture, and then return it to IDLE (or not set at all). Specifying
-          START for multiple captures in a row means restarting the AF operation over
-          and over again.
-
-          See android.control.afState for what the trigger means for each AF mode.
-
-          Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger
-          simultaneously is allowed. However, since these triggers often require cooperation between
-          the auto-focus and auto-exposure routines (for example, the flash may need to be enabled
-          for a focus sweep), the camera device may delay acting on a later trigger until the previous
-          trigger has been fully handled. This may lead to longer intervals between the trigger and
-          changes to android.control.afState, for example.
-          </details>
-          <hal_details>
-          The HAL must support triggering the AF trigger while an AE precapture trigger is active
-          (and vice versa), or at the same time as the AE trigger.  It is acceptable for the HAL to
-          treat these as two consecutive triggers, for example handling the AF trigger and then the
-          AE trigger.  Or the HAL may choose to optimize the case with both triggers fired at once,
-          to minimize the latency for converging both focus and exposure/flash usage.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="awbLock" type="byte" visibility="public" enum="true"
-               typedef="boolean" hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>Auto-white balance lock is disabled; the AWB
-            algorithm is free to update its parameters if in AUTO
-            mode.</notes></value>
-            <value>ON
-            <notes>Auto-white balance lock is enabled; the AWB
-            algorithm will not update its parameters while the lock
-            is active.</notes></value>
-          </enum>
-          <description>Whether auto-white balance (AWB) is currently locked to its
-          latest calculated values.</description>
-          <details>
-          When set to `true` (ON), the AWB algorithm is locked to its latest parameters,
-          and will not change color balance settings until the lock is set to `false` (OFF).
-
-          Since the camera device has a pipeline of in-flight requests, the settings that
-          get locked do not necessarily correspond to the settings that were present in the
-          latest capture result received from the camera device, since additional captures
-          and AWB updates may have occurred even before the result was sent out. If an
-          application is switching between automatic and manual control and wishes to eliminate
-          any flicker during the switch, the following procedure is recommended:
-
-            1. Starting in auto-AWB mode:
-            2. Lock AWB
-            3. Wait for the first result to be output that has the AWB locked
-            4. Copy AWB settings from that result into a request, set the request to manual AWB
-            5. Submit the capture request, proceed to run manual AWB as desired.
-
-          Note that AWB lock is only meaningful when
-          android.control.awbMode is in the AUTO mode; in other modes,
-          AWB is already fixed to a specific setting.
-
-          Some LEGACY devices may not support ON; the value is then overridden to OFF.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="awbMode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>
-            The camera device's auto-white balance routine is disabled.
-
-            The application-selected color transform matrix
-            (android.colorCorrection.transform) and gains
-            (android.colorCorrection.gains) are used by the camera
-            device for manual white balance control.
-            </notes>
-            </value>
-            <value>AUTO
-            <notes>
-            The camera device's auto-white balance routine is active.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>INCANDESCENT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses incandescent light as the assumed scene
-            illumination for white balance.
-
-            While the exact white balance transforms are up to the
-            camera device, they will approximately match the CIE
-            standard illuminant A.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>FLUORESCENT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses fluorescent light as the assumed scene
-            illumination for white balance.
-
-            While the exact white balance transforms are up to the
-            camera device, they will approximately match the CIE
-            standard illuminant F2.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>WARM_FLUORESCENT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses warm fluorescent light as the assumed scene
-            illumination for white balance.
-
-            While the exact white balance transforms are up to the
-            camera device, they will approximately match the CIE
-            standard illuminant F4.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>DAYLIGHT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses daylight light as the assumed scene
-            illumination for white balance.
-
-            While the exact white balance transforms are up to the
-            camera device, they will approximately match the CIE
-            standard illuminant D65.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>CLOUDY_DAYLIGHT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses cloudy daylight light as the assumed scene
-            illumination for white balance.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>TWILIGHT
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses twilight light as the assumed scene
-            illumination for white balance.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-            <value>SHADE
-            <notes>
-            The camera device's auto-white balance routine is disabled;
-            the camera device uses shade light as the assumed scene
-            illumination for white balance.
-
-            The application's values for android.colorCorrection.transform
-            and android.colorCorrection.gains are ignored.
-            For devices that support the MANUAL_POST_PROCESSING capability, the
-            values used by the camera device for the transform and gains
-            will be available in the capture result for this request.
-            </notes>
-            </value>
-          </enum>
-          <description>Whether auto-white balance (AWB) is currently setting the color
-          transform fields, and what its illumination target
-          is.</description>
-          <range>android.control.awbAvailableModes</range>
-          <details>
-          This control is only effective if android.control.mode is AUTO.
-
-          When set to the ON mode, the camera device's auto-white balance
-          routine is enabled, overriding the application's selected
-          android.colorCorrection.transform, android.colorCorrection.gains and
-          android.colorCorrection.mode. Note that when android.control.aeMode
-          is OFF, the behavior of AWB is device dependent. It is recommended to
-          also set AWB mode to OFF or lock AWB by using android.control.awbLock before
-          setting AE mode to OFF.
-
-          When set to the OFF mode, the camera device's auto-white balance
-          routine is disabled. The application manually controls the white
-          balance by android.colorCorrection.transform, android.colorCorrection.gains
-          and android.colorCorrection.mode.
-
-          When set to any other modes, the camera device's auto-white
-          balance routine is disabled. The camera device uses each
-          particular illumination target for white balance
-          adjustment. The application's values for
-          android.colorCorrection.transform,
-          android.colorCorrection.gains and
-          android.colorCorrection.mode are ignored.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="awbRegions" type="int32" visibility="public"
-               optional="true" container="array" typedef="meteringRectangle">
-          <array>
-            <size>5</size>
-            <size>area_count</size>
-          </array>
-          <description>List of metering areas to use for auto-white-balance illuminant
-          estimation.</description>
-          <units>Pixel coordinates within android.sensor.info.activeArraySize</units>
-          <range>Coordinates must be between `[(0,0), (width, height))` of
-          android.sensor.info.activeArraySize</range>
-          <details>
-              Not available if android.control.maxRegionsAwb is 0.
-              Otherwise will always be present.
-
-              The maximum number of regions supported by the device is determined by the value
-              of android.control.maxRegionsAwb.
-
-              The data representation is int[5 * area_count].
-              Every five elements represent a metering region of (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
-              ymax.
-
-              The coordinate system is based on the active pixel array,
-              with (0,0) being the top-left pixel in the active pixel array, and
-              (android.sensor.info.activeArraySize.width - 1,
-              android.sensor.info.activeArraySize.height - 1) being the
-              bottom-right pixel in the active pixel array.
-
-              The weight must range from 0 to 1000, and represents a weight
-              for every pixel in the area. This means that a large metering area
-              with the same weight as a smaller area will have more effect in
-              the metering result. Metering areas can partially overlap and the
-              camera device will add the weights in the overlap region.
-
-              The weights are relative to weights of other white balance metering regions, so if
-              only one region is used, all non-zero weights will have the same effect. A region with
-              0 weight is ignored.
-
-              If all regions have 0 weight, then no specific metering area needs to be used by the
-              camera device.
-
-              If the metering region is outside the used android.scaler.cropRegion returned in
-              capture result metadata, the camera device will ignore the sections outside the crop
-              region and output only the intersection rectangle as the metering region in the result
-              metadata.  If the region is entirely outside the crop region, it will be ignored and
-              not reported in the result metadata.
-          </details>
-          <hal_details>
-              The HAL level representation of MeteringRectangle[] is a
-              int[5 * area_count].
-              Every five elements represent a metering region of
-              (xmin, ymin, xmax, ymax, weight).
-              The rectangle is defined to be inclusive on xmin and ymin, but
-              exclusive on xmax and ymax.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="captureIntent" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>CUSTOM
-            <notes>The goal of this request doesn't fall into the other
-            categories. The camera device will default to preview-like
-            behavior.</notes></value>
-            <value>PREVIEW
-            <notes>This request is for a preview-like use case.
-
-            The precapture trigger may be used to start off a metering
-            w/flash sequence.
-            </notes></value>
-            <value>STILL_CAPTURE
-            <notes>This request is for a still capture-type
-            use case.
-
-            If the flash unit is under automatic control, it may fire as needed.
-            </notes></value>
-            <value>VIDEO_RECORD
-            <notes>This request is for a video recording
-            use case.</notes></value>
-            <value>VIDEO_SNAPSHOT
-            <notes>This request is for a video snapshot (still
-            image while recording video) use case.
-
-            The camera device should take the highest-quality image
-            possible (given the other settings) without disrupting the
-            frame rate of video recording.  </notes></value>
-            <value>ZERO_SHUTTER_LAG
-            <notes>This request is for a ZSL usecase; the
-            application will stream full-resolution images and
-            reprocess one or several later for a final
-            capture.
-            </notes></value>
-            <value>MANUAL
-            <notes>This request is for manual capture use case where
-            the applications want to directly control the capture parameters.
-
-            For example, the application may wish to manually control
-            android.sensor.exposureTime, android.sensor.sensitivity, etc.
-            </notes></value>
-          </enum>
-          <description>Information to the camera device 3A (auto-exposure,
-          auto-focus, auto-white balance) routines about the purpose
-          of this capture, to help the camera device to decide optimal 3A
-          strategy.</description>
-          <details>This control (except for MANUAL) is only effective if
-          `android.control.mode != OFF` and any 3A routine is active.
-
-          ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities
-          contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
-          android.request.availableCapabilities contains MANUAL_SENSOR. Other intent values are
-          always supported.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="effectMode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>OFF
-              <notes>
-              No color effect will be applied.
-              </notes>
-            </value>
-            <value optional="true">MONO
-              <notes>
-              A "monocolor" effect where the image is mapped into
-              a single color.
-
-              This will typically be grayscale.
-              </notes>
-            </value>
-            <value optional="true">NEGATIVE
-              <notes>
-              A "photo-negative" effect where the image's colors
-              are inverted.
-              </notes>
-            </value>
-            <value optional="true">SOLARIZE
-              <notes>
-              A "solarisation" effect (Sabattier effect) where the
-              image is wholly or partially reversed in
-              tone.
-              </notes>
-            </value>
-            <value optional="true">SEPIA
-              <notes>
-              A "sepia" effect where the image is mapped into warm
-              gray, red, and brown tones.
-              </notes>
-            </value>
-            <value optional="true">POSTERIZE
-              <notes>
-              A "posterization" effect where the image uses
-              discrete regions of tone rather than a continuous
-              gradient of tones.
-              </notes>
-            </value>
-            <value optional="true">WHITEBOARD
-              <notes>
-              A "whiteboard" effect where the image is typically displayed
-              as regions of white, with black or grey details.
-              </notes>
-            </value>
-            <value optional="true">BLACKBOARD
-              <notes>
-              A "blackboard" effect where the image is typically displayed
-              as regions of black, with white or grey details.
-              </notes>
-            </value>
-            <value optional="true">AQUA
-              <notes>
-              An "aqua" effect where a blue hue is added to the image.
-              </notes>
-            </value>
-          </enum>
-          <description>A special color effect to apply.</description>
-          <range>android.control.availableEffects</range>
-          <details>
-          When this mode is set, a color effect will be applied
-          to images produced by the camera device. The interpretation
-          and implementation of these color effects is left to the
-          implementor of the camera device, and should not be
-          depended on to be consistent (or present) across all
-          devices.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="mode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>Full application control of pipeline.
-
-            All control by the device's metering and focusing (3A)
-            routines is disabled, and no other settings in
-            android.control.* have any effect, except that
-            android.control.captureIntent may be used by the camera
-            device to select post-processing values for processing
-            blocks that do not allow for manual control, or are not
-            exposed by the camera API.
-
-            However, the camera device's 3A routines may continue to
-            collect statistics and update their internal state so that
-            when control is switched to AUTO mode, good control values
-            can be immediately applied.
-            </notes></value>
-            <value>AUTO
-            <notes>Use settings for each individual 3A routine.
-
-            Manual control of capture parameters is disabled. All
-            controls in android.control.* besides sceneMode take
-            effect.</notes></value>
-            <value optional="true">USE_SCENE_MODE
-            <notes>Use a specific scene mode.
-
-            Enabling this disables control.aeMode, control.awbMode and
-            control.afMode controls; the camera device will ignore
-            those settings while USE_SCENE_MODE is active (except for
-            FACE_PRIORITY scene mode). Other control entries are still active.
-            This setting can only be used if scene mode is supported (i.e.
-            android.control.availableSceneModes
-            contain some modes other than DISABLED).</notes></value>
-            <value optional="true">OFF_KEEP_STATE
-            <notes>Same as OFF mode, except that this capture will not be
-            used by camera device background auto-exposure, auto-white balance and
-            auto-focus algorithms (3A) to update their statistics.
-
-            Specifically, the 3A routines are locked to the last
-            values set from a request with AUTO, OFF, or
-            USE_SCENE_MODE, and any statistics or state updates
-            collected from manual captures with OFF_KEEP_STATE will be
-            discarded by the camera device.
-            </notes></value>
-          </enum>
-          <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
-          routines.</description>
-          <range>android.control.availableModes</range>
-          <details>
-          This is a top-level 3A control switch. When set to OFF, all 3A control
-          by the camera device is disabled. The application must set the fields for
-          capture parameters itself.
-
-          When set to AUTO, the individual algorithm controls in
-          android.control.* are in effect, such as android.control.afMode.
-
-          When set to USE_SCENE_MODE, the individual controls in
-          android.control.* are mostly disabled, and the camera device implements
-          one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
-          as it wishes. The camera device scene mode 3A settings are provided by
-          capture results {@link ACameraMetadata} from
-          {@link ACameraCaptureSession_captureCallback_result}.
-
-          When set to OFF_KEEP_STATE, it is similar to OFF mode, the only difference
-          is that this frame will not be used by camera device background 3A statistics
-          update, as if this frame is never captured. This mode can be used in the scenario
-          where the application doesn't want a 3A manual control capture to affect
-          the subsequent auto 3A capture results.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="sceneMode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value id="0">DISABLED
-              <notes>
-              Indicates that no scene modes are set for a given capture request.
-              </notes>
-            </value>
-            <value>FACE_PRIORITY
-              <notes>If face detection support exists, use face
-              detection data for auto-focus, auto-white balance, and
-              auto-exposure routines.
-
-              If face detection statistics are disabled
-              (i.e. android.statistics.faceDetectMode is set to OFF),
-              this should still operate correctly (but will not return
-              face detection statistics to the framework).
-
-              Unlike the other scene modes, android.control.aeMode,
-              android.control.awbMode, and android.control.afMode
-              remain active when FACE_PRIORITY is set.
-              </notes>
-            </value>
-            <value optional="true">ACTION
-              <notes>
-              Optimized for photos of quickly moving objects.
-
-              Similar to SPORTS.
-              </notes>
-            </value>
-            <value optional="true">PORTRAIT
-              <notes>
-              Optimized for still photos of people.
-              </notes>
-            </value>
-            <value optional="true">LANDSCAPE
-              <notes>
-              Optimized for photos of distant macroscopic objects.
-              </notes>
-            </value>
-            <value optional="true">NIGHT
-              <notes>
-              Optimized for low-light settings.
-              </notes>
-            </value>
-            <value optional="true">NIGHT_PORTRAIT
-              <notes>
-              Optimized for still photos of people in low-light
-              settings.
-              </notes>
-            </value>
-            <value optional="true">THEATRE
-              <notes>
-              Optimized for dim, indoor settings where flash must
-              remain off.
-              </notes>
-            </value>
-            <value optional="true">BEACH
-              <notes>
-              Optimized for bright, outdoor beach settings.
-              </notes>
-            </value>
-            <value optional="true">SNOW
-              <notes>
-              Optimized for bright, outdoor settings containing snow.
-              </notes>
-            </value>
-            <value optional="true">SUNSET
-              <notes>
-              Optimized for scenes of the setting sun.
-              </notes>
-            </value>
-            <value optional="true">STEADYPHOTO
-              <notes>
-              Optimized to avoid blurry photos due to small amounts of
-              device motion (for example: due to hand shake).
-              </notes>
-            </value>
-            <value optional="true">FIREWORKS
-              <notes>
-              Optimized for nighttime photos of fireworks.
-              </notes>
-            </value>
-            <value optional="true">SPORTS
-              <notes>
-              Optimized for photos of quickly moving people.
-
-              Similar to ACTION.
-              </notes>
-            </value>
-            <value optional="true">PARTY
-              <notes>
-              Optimized for dim, indoor settings with multiple moving
-              people.
-              </notes>
-            </value>
-            <value optional="true">CANDLELIGHT
-              <notes>
-              Optimized for dim settings where the main light source
-              is a flame.
-              </notes>
-            </value>
-            <value optional="true">BARCODE
-              <notes>
-              Optimized for accurately capturing a photo of barcode
-              for use by camera applications that wish to read the
-              barcode value.
-              </notes>
-            </value>
-            <value deprecated="true" optional="true" ndk_hidden="true">HIGH_SPEED_VIDEO
-              <notes>
-              This is deprecated, please use {@link
-              android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
-              and {@link
-              android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
-              for high speed video recording.
-
-              Optimized for high speed video recording (frame rate >=60fps) use case.
-
-              The supported high speed video sizes and fps ranges are specified in
-              android.control.availableHighSpeedVideoConfigurations. To get desired
-              output frame rates, the application is only allowed to select video size
-              and fps range combinations listed in this static metadata. The fps range
-              can be controlled via android.control.aeTargetFpsRange.
-
-              In this mode, the camera device will override aeMode, awbMode, and afMode to
-              ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
-              controls will be overridden to be FAST. Therefore, no manual control of capture
-              and post-processing parameters is possible. All other controls operate the
-              same as when android.control.mode == AUTO. This means that all other
-              android.control.* fields continue to work, such as
-
-              * android.control.aeTargetFpsRange
-              * android.control.aeExposureCompensation
-              * android.control.aeLock
-              * android.control.awbLock
-              * android.control.effectMode
-              * android.control.aeRegions
-              * android.control.afRegions
-              * android.control.awbRegions
-              * android.control.afTrigger
-              * android.control.aePrecaptureTrigger
-
-              Outside of android.control.*, the following controls will work:
-
-              * android.flash.mode (automatic flash for still capture will not work since aeMode is ON)
-              * android.lens.opticalStabilizationMode (if it is supported)
-              * android.scaler.cropRegion
-              * android.statistics.faceDetectMode
-
-              For high speed recording use case, the actual maximum supported frame rate may
-              be lower than what camera can output, depending on the destination Surfaces for
-              the image data. For example, if the destination surface is from video encoder,
-              the application need check if the video encoder is capable of supporting the
-              high frame rate for a given video size, or it will end up with lower recording
-              frame rate. If the destination surface is from preview window, the preview frame
-              rate will be bounded by the screen refresh rate.
-
-              The camera device will only support up to 2 output high speed streams
-              (processed non-stalling format defined in android.request.maxNumOutputStreams)
-              in this mode. This control will be effective only if all of below conditions are true:
-
-              * The application created no more than maxNumHighSpeedStreams processed non-stalling
-              format output streams, where maxNumHighSpeedStreams is calculated as
-              min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
-              * The stream sizes are selected from the sizes reported by
-              android.control.availableHighSpeedVideoConfigurations.
-              * No processed non-stalling or raw streams are configured.
-
-              When above conditions are NOT satistied, the controls of this mode and
-              android.control.aeTargetFpsRange will be ignored by the camera device,
-              the camera device will fall back to android.control.mode `==` AUTO,
-              and the returned capture result metadata will give the fps range choosen
-              by the camera device.
-
-              Switching into or out of this mode may trigger some camera ISP/sensor
-              reconfigurations, which may introduce extra latency. It is recommended that
-              the application avoids unnecessary scene mode switch as much as possible.
-              </notes>
-            </value>
-            <value optional="true">HDR
-              <notes>
-              Turn on a device-specific high dynamic range (HDR) mode.
-
-              In this scene mode, the camera device captures images
-              that keep a larger range of scene illumination levels
-              visible in the final image. For example, when taking a
-              picture of a object in front of a bright window, both
-              the object and the scene through the window may be
-              visible when using HDR mode, while in normal AUTO mode,
-              one or the other may be poorly exposed. As a tradeoff,
-              HDR mode generally takes much longer to capture a single
-              image, has no user control, and may have other artifacts
-              depending on the HDR method used.
-
-              Therefore, HDR captures operate at a much slower rate
-              than regular captures.
-
-              In this mode, on LIMITED or FULL devices, when a request
-              is made with a android.control.captureIntent of
-              STILL_CAPTURE, the camera device will capture an image
-              using a high dynamic range capture technique.  On LEGACY
-              devices, captures that target a JPEG-format output will
-              be captured with HDR, and the capture intent is not
-              relevant.
-
-              The HDR capture may involve the device capturing a burst
-              of images internally and combining them into one, or it
-              may involve the device using specialized high dynamic
-              range capture hardware. In all cases, a single image is
-              produced in response to a capture request submitted
-              while in HDR mode.
-
-              Since substantial post-processing is generally needed to
-              produce an HDR image, only YUV, PRIVATE, and JPEG
-              outputs are supported for LIMITED/FULL device HDR
-              captures, and only JPEG outputs are supported for LEGACY
-              HDR captures. Using a RAW output for HDR capture is not
-              supported.
-
-              Some devices may also support always-on HDR, which
-              applies HDR processing at full frame rate.  For these
-              devices, intents other than STILL_CAPTURE will also
-              produce an HDR output with no frame rate impact compared
-              to normal operation, though the quality may be lower
-              than for STILL_CAPTURE intents.
-
-              If SCENE_MODE_HDR is used with unsupported output types
-              or capture intents, the images captured will be as if
-              the SCENE_MODE was not enabled at all.
-              </notes>
-            </value>
-            <value optional="true" hidden="true">FACE_PRIORITY_LOW_LIGHT
-              <notes>Same as FACE_PRIORITY scene mode, except that the camera
-              device will choose higher sensitivity values (android.sensor.sensitivity)
-              under low light conditions.
-
-              The camera device may be tuned to expose the images in a reduced
-              sensitivity range to produce the best quality images. For example,
-              if the android.sensor.info.sensitivityRange gives range of [100, 1600],
-              the camera device auto-exposure routine tuning process may limit the actual
-              exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
-              exessive in order to preserve the image quality. Under this situation, the image under
-              low light may be under-exposed when the sensor max exposure time (bounded by the
-              android.control.aeTargetFpsRange when android.control.aeMode is one of the
-              ON_* modes) and effective max sensitivity are reached. This scene mode allows the
-              camera device auto-exposure routine to increase the sensitivity up to the max
-              sensitivity specified by android.sensor.info.sensitivityRange when the scene is too
-              dark and the max exposure time is reached. The captured images may be noisier
-              compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
-              recommended that the application only use this scene mode when it is capable of
-              reducing the noise level of the captured images.
-
-              Unlike the other scene modes, android.control.aeMode,
-              android.control.awbMode, and android.control.afMode
-              remain active when FACE_PRIORITY_LOW_LIGHT is set.
-              </notes>
-            </value>
-            <value optional="true" hidden="true" id="100">DEVICE_CUSTOM_START
-              <notes>
-                Scene mode values within the range of
-                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
-                customized scene modes.
-              </notes>
-            </value>
-            <value optional="true" hidden="true" id="127">DEVICE_CUSTOM_END
-              <notes>
-                Scene mode values within the range of
-                `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific
-                customized scene modes.
-              </notes>
-            </value>
-          </enum>
-          <description>
-          Control for which scene mode is currently active.
-          </description>
-          <range>android.control.availableSceneModes</range>
-          <details>
-          Scene modes are custom camera modes optimized for a certain set of conditions and
-          capture settings.
-
-          This is the mode that that is active when
-          `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will
-          disable android.control.aeMode, android.control.awbMode, and android.control.afMode
-          while in use.
-
-          The interpretation and implementation of these scene modes is left
-          to the implementor of the camera device. Their behavior will not be
-          consistent across all devices, and any given device may only implement
-          a subset of these modes.
-          </details>
-          <hal_details>
-          HAL implementations that include scene modes are expected to provide
-          the per-scene settings to use for android.control.aeMode,
-          android.control.awbMode, and android.control.afMode in
-          android.control.sceneModeOverrides.
-
-          For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes,
-          the HAL must list supported video size and fps range in
-          android.control.availableHighSpeedVideoConfigurations. For a given size, e.g.
-          1280x720, if the HAL has two different sensor configurations for normal streaming
-          mode and high speed streaming, when this scene mode is set/reset in a sequence of capture
-          requests, the HAL may have to switch between different sensor modes.
-          This mode is deprecated in HAL3.3, to support high speed video recording, please implement
-          android.control.availableHighSpeedVideoConfigurations and CONSTRAINED_HIGH_SPEED_VIDEO
-          capbility defined in android.request.availableCapabilities.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="videoStabilizationMode" type="byte" visibility="public"
-               enum="true" hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>
-              Video stabilization is disabled.
-            </notes></value>
-            <value>ON
-            <notes>
-              Video stabilization is enabled.
-            </notes></value>
-          </enum>
-          <description>Whether video stabilization is
-          active.</description>
-          <details>
-          Video stabilization automatically warps images from
-          the camera in order to stabilize motion between consecutive frames.
-
-          If enabled, video stabilization can modify the
-          android.scaler.cropRegion to keep the video stream stabilized.
-
-          Switching between different video stabilization modes may take several
-          frames to initialize, the camera device will report the current mode
-          in capture result metadata. For example, When "ON" mode is requested,
-          the video stabilization modes in the first several capture results may
-          still be "OFF", and it will become "ON" when the initialization is
-          done.
-
-          In addition, not all recording sizes or frame rates may be supported for
-          stabilization by a device that reports stabilization support. It is guaranteed
-          that an output targeting a MediaRecorder or MediaCodec will be stabilized if
-          the recording resolution is less than or equal to 1920 x 1080 (width less than
-          or equal to 1920, height less than or equal to 1080), and the recording
-          frame rate is less than or equal to 30fps.  At other sizes, the CaptureResult
-          android.control.videoStabilizationMode field will return
-          OFF if the recording output is not stabilized, or if there are no output
-          Surface types that can be stabilized.
-
-          If a camera device supports both this mode and OIS
-          (android.lens.opticalStabilizationMode), turning both modes on may
-          produce undesirable interaction, so it is recommended not to enable
-          both at the same time.
-          </details>
-          <tag id="BC" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="aeAvailableAntibandingModes" type="byte" visibility="public"
-               type_notes="list of enums" container="array" typedef="enumList"
-               hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-            List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are
-            supported by this camera device.
-          </description>
-          <range>Any value listed in android.control.aeAntibandingMode</range>
-          <details>
-            Not all of the auto-exposure anti-banding modes may be
-            supported by a given camera device. This field lists the
-            valid anti-banding modes that the application may request
-            for this camera device with the
-            android.control.aeAntibandingMode control.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeAvailableModes" type="byte" visibility="public"
-               type_notes="list of enums" container="array" typedef="enumList"
-               hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-            List of auto-exposure modes for android.control.aeMode that are supported by this camera
-            device.
-          </description>
-          <range>Any value listed in android.control.aeMode</range>
-          <details>
-            Not all the auto-exposure modes may be supported by a
-            given camera device, especially if no flash unit is
-            available. This entry lists the valid modes for
-            android.control.aeMode for this camera device.
-
-            All camera devices support ON, and all camera devices with flash
-            units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.
-
-            FULL mode camera devices always support OFF mode,
-            which enables application control of camera exposure time,
-            sensitivity, and frame duration.
-
-            LEGACY mode camera devices never support OFF mode.
-            LIMITED mode devices support OFF if they support the MANUAL_SENSOR
-            capability.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public"
-               type_notes="list of pairs of frame rates"
-               container="array" typedef="rangeInt"
-               hwlevel="legacy">
-          <array>
-            <size>2</size>
-            <size>n</size>
-          </array>
-          <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by
-          this camera device.</description>
-          <units>Frames per second (FPS)</units>
-          <details>
-          For devices at the LEGACY level or above:
-
-          * For constant-framerate recording, for each normal
-          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html), that is, a
-          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html) that has
-          [quality](https://developer.android.com/reference/android/media/CamcorderProfile.html#quality)
-          in the range [
-          [QUALITY_LOW](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_LOW),
-          [QUALITY_2160P](https://developer.android.com/reference/android/media/CamcorderProfile.html#QUALITY_2160P)],
-          if the profile is supported by the device and has
-          [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate)
-          `x`, this list will always include (`x`,`x`).
-
-          * Also, a camera device must either not support any
-          [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html),
-          or support at least one
-          normal [CamcorderProfile](https://developer.android.com/reference/android/media/CamcorderProfile.html)
-          that has
-          [videoFrameRate](https://developer.android.com/reference/android/media/CamcorderProfile.html#videoFrameRate) `x` &gt;= 24.
-
-          For devices at the LIMITED level or above:
-
-          * For YUV_420_888 burst capture use case, this list will always include (`min`, `max`)
-          and (`max`, `max`) where `min` &lt;= 15 and `max` = the maximum output frame rate of the
-          maximum YUV_420_888 output size.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeCompensationRange" type="int32" visibility="public"
-               container="array" typedef="rangeInt"
-               hwlevel="legacy">
-          <array>
-            <size>2</size>
-          </array>
-          <description>Maximum and minimum exposure compensation values for
-          android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep,
-          that are supported by this camera device.</description>
-          <range>
-            Range [0,0] indicates that exposure compensation is not supported.
-
-            For LIMITED and FULL devices, range must follow below requirements if exposure
-            compensation is supported (`range != [0, 0]`):
-
-            `Min.exposure compensation * android.control.aeCompensationStep &lt;= -2 EV`
-
-            `Max.exposure compensation * android.control.aeCompensationStep &gt;= 2 EV`
-
-            LEGACY devices may support a smaller range than this.
-          </range>
-          <tag id="BC" />
-        </entry>
-        <entry name="aeCompensationStep" type="rational" visibility="public"
-               hwlevel="legacy">
-          <description>Smallest step by which the exposure compensation
-          can be changed.</description>
-          <units>Exposure Value (EV)</units>
-          <details>
-          This is the unit for android.control.aeExposureCompensation. For example, if this key has
-          a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means
-          that the target EV offset for the auto-exposure routine is -1 EV.
-
-          One unit of EV compensation changes the brightness of the captured image by a factor
-          of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.
-          </details>
-          <hal_details>
-            This must be less than or equal to 1/2.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="afAvailableModes" type="byte" visibility="public"
-               type_notes="List of enums" container="array" typedef="enumList"
-               hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of auto-focus (AF) modes for android.control.afMode that are
-          supported by this camera device.
-          </description>
-          <range>Any value listed in android.control.afMode</range>
-          <details>
-          Not all the auto-focus modes may be supported by a
-          given camera device. This entry lists the valid modes for
-          android.control.afMode for this camera device.
-
-          All LIMITED and FULL mode camera devices will support OFF mode, and all
-          camera devices with adjustable focuser units
-          (`android.lens.info.minimumFocusDistance &gt; 0`) will support AUTO mode.
-
-          LEGACY devices will support OFF mode only if they support
-          focusing to infinity (by also setting android.lens.focusDistance to
-          `0.0f`).
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableEffects" type="byte" visibility="public"
-               type_notes="List of enums (android.control.effectMode)." container="array"
-               typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of color effects for android.control.effectMode that are supported by this camera
-          device.
-          </description>
-          <range>Any value listed in android.control.effectMode</range>
-          <details>
-          This list contains the color effect modes that can be applied to
-          images produced by the camera device.
-          Implementations are not expected to be consistent across all devices.
-          If no color effect modes are available for a device, this will only list
-          OFF.
-
-          A color effect will only be applied if
-          android.control.mode != OFF.  OFF is always included in this list.
-
-          This control has no effect on the operation of other control routines such
-          as auto-exposure, white balance, or focus.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableSceneModes" type="byte" visibility="public"
-               type_notes="List of enums (android.control.sceneMode)."
-               container="array" typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of scene modes for android.control.sceneMode that are supported by this camera
-          device.
-          </description>
-          <range>Any value listed in android.control.sceneMode</range>
-          <details>
-          This list contains scene modes that can be set for the camera device.
-          Only scene modes that have been fully implemented for the
-          camera device may be included here. Implementations are not expected
-          to be consistent across all devices.
-
-          If no scene modes are supported by the camera device, this
-          will be set to DISABLED. Otherwise DISABLED will not be listed.
-
-          FACE_PRIORITY is always listed if face detection is
-          supported (i.e.`android.statistics.info.maxFaceCount &gt;
-          0`).
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableVideoStabilizationModes" type="byte"
-               visibility="public" type_notes="List of enums." container="array"
-               typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of video stabilization modes for android.control.videoStabilizationMode
-          that are supported by this camera device.
-          </description>
-          <range>Any value listed in android.control.videoStabilizationMode</range>
-          <details>
-          OFF will always be listed.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="awbAvailableModes" type="byte" visibility="public"
-               type_notes="List of enums"
-               container="array" typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of auto-white-balance modes for android.control.awbMode that are supported by this
-          camera device.
-          </description>
-          <range>Any value listed in android.control.awbMode</range>
-          <details>
-          Not all the auto-white-balance modes may be supported by a
-          given camera device. This entry lists the valid modes for
-          android.control.awbMode for this camera device.
-
-          All camera devices will support ON mode.
-
-          Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
-          mode, which enables application control of white balance, by using
-          android.colorCorrection.transform and android.colorCorrection.gains
-          (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL
-          mode camera devices.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="maxRegions" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>3</size>
-          </array>
-          <description>
-          List of the maximum number of regions that can be used for metering in
-          auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
-          this corresponds to the the maximum number of elements in
-          android.control.aeRegions, android.control.awbRegions,
-          and android.control.afRegions.
-          </description>
-          <range>
-          Value must be &amp;gt;= 0 for each element. For full-capability devices
-          this value must be &amp;gt;= 1 for AE and AF. The order of the elements is:
-          `(AE, AWB, AF)`.</range>
-          <tag id="BC" />
-        </entry>
-        <entry name="maxRegionsAe" type="int32" visibility="java_public"
-               synthetic="true" hwlevel="legacy">
-          <description>
-          The maximum number of metering regions that can be used by the auto-exposure (AE)
-          routine.
-          </description>
-          <range>Value will be &amp;gt;= 0. For FULL-capability devices, this
-          value will be &amp;gt;= 1.
-          </range>
-          <details>
-          This corresponds to the the maximum allowed number of elements in
-          android.control.aeRegions.
-          </details>
-          <hal_details>This entry is private to the framework. Fill in
-          maxRegions to have this entry be automatically populated.
-          </hal_details>
-        </entry>
-        <entry name="maxRegionsAwb" type="int32" visibility="java_public"
-               synthetic="true" hwlevel="legacy">
-          <description>
-          The maximum number of metering regions that can be used by the auto-white balance (AWB)
-          routine.
-          </description>
-          <range>Value will be &amp;gt;= 0.
-          </range>
-          <details>
-          This corresponds to the the maximum allowed number of elements in
-          android.control.awbRegions.
-          </details>
-          <hal_details>This entry is private to the framework. Fill in
-          maxRegions to have this entry be automatically populated.
-          </hal_details>
-        </entry>
-        <entry name="maxRegionsAf" type="int32" visibility="java_public"
-               synthetic="true" hwlevel="legacy">
-          <description>
-          The maximum number of metering regions that can be used by the auto-focus (AF) routine.
-          </description>
-          <range>Value will be &amp;gt;= 0. For FULL-capability devices, this
-          value will be &amp;gt;= 1.
-          </range>
-          <details>
-          This corresponds to the the maximum allowed number of elements in
-          android.control.afRegions.
-          </details>
-          <hal_details>This entry is private to the framework. Fill in
-          maxRegions to have this entry be automatically populated.
-          </hal_details>
-        </entry>
-        <entry name="sceneModeOverrides" type="byte" visibility="system"
-               container="array" hwlevel="limited">
-          <array>
-            <size>3</size>
-            <size>length(availableSceneModes)</size>
-          </array>
-          <description>
-          Ordered list of auto-exposure, auto-white balance, and auto-focus
-          settings to use with each available scene mode.
-          </description>
-          <range>
-          For each available scene mode, the list must contain three
-          entries containing the android.control.aeMode,
-          android.control.awbMode, and android.control.afMode values used
-          by the camera device. The entry order is `(aeMode, awbMode, afMode)`
-          where aeMode has the lowest index position.
-          </range>
-          <details>
-          When a scene mode is enabled, the camera device is expected
-          to override android.control.aeMode, android.control.awbMode,
-          and android.control.afMode with its preferred settings for
-          that scene mode.
-
-          The order of this list matches that of availableSceneModes,
-          with 3 entries for each mode.  The overrides listed
-          for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
-          since for that mode the application-set android.control.aeMode,
-          android.control.awbMode, and android.control.afMode values are
-          used instead, matching the behavior when android.control.mode
-          is set to AUTO. It is recommended that the FACE_PRIORITY and
-          FACE_PRIORITY_LOW_LIGHT (if supported) overrides should be set to 0.
-
-          For example, if availableSceneModes contains
-          `(FACE_PRIORITY, ACTION, NIGHT)`,  then the camera framework
-          expects sceneModeOverrides to have 9 entries formatted like:
-          `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE,
-          ON_AUTO_FLASH, INCANDESCENT, AUTO)`.
-          </details>
-          <hal_details>
-          To maintain backward compatibility, this list will be made available
-          in the static metadata of the camera service.  The camera service will
-          use these values to set android.control.aeMode,
-          android.control.awbMode, and android.control.afMode when using a scene
-          mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported).
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-      </static>
-      <dynamic>
-        <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true">
-          <description>The ID sent with the latest
-          CAMERA2_TRIGGER_PRECAPTURE_METERING call</description>
-          <details>Must be 0 if no
-          CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
-          by HAL. Always updated even if AE algorithm ignores the
-          trigger</details>
-        </entry>
-        <clone entry="android.control.aeAntibandingMode" kind="controls">
-        </clone>
-        <clone entry="android.control.aeExposureCompensation" kind="controls">
-        </clone>
-        <clone entry="android.control.aeLock" kind="controls">
-        </clone>
-        <clone entry="android.control.aeMode" kind="controls">
-        </clone>
-        <clone entry="android.control.aeRegions" kind="controls">
-        </clone>
-        <clone entry="android.control.aeTargetFpsRange" kind="controls">
-        </clone>
-        <clone entry="android.control.aePrecaptureTrigger" kind="controls">
-        </clone>
-        <entry name="aeState" type="byte" visibility="public" enum="true"
-               hwlevel="limited">
-          <enum>
-            <value>INACTIVE
-            <notes>AE is off or recently reset.
-
-            When a camera device is opened, it starts in
-            this state. This is a transient state, the camera device may skip reporting
-            this state in capture result.</notes></value>
-            <value>SEARCHING
-            <notes>AE doesn't yet have a good set of control values
-            for the current scene.
-
-            This is a transient state, the camera device may skip
-            reporting this state in capture result.</notes></value>
-            <value>CONVERGED
-            <notes>AE has a good set of control values for the
-            current scene.</notes></value>
-            <value>LOCKED
-            <notes>AE has been locked.</notes></value>
-            <value>FLASH_REQUIRED
-            <notes>AE has a good set of control values, but flash
-            needs to be fired for good quality still
-            capture.</notes></value>
-            <value>PRECAPTURE
-            <notes>AE has been asked to do a precapture sequence
-            and is currently executing it.
-
-            Precapture can be triggered through setting
-            android.control.aePrecaptureTrigger to START. Currently
-            active and completed (if it causes camera device internal AE lock) precapture
-            metering sequence can be canceled through setting
-            android.control.aePrecaptureTrigger to CANCEL.
-
-            Once PRECAPTURE completes, AE will transition to CONVERGED
-            or FLASH_REQUIRED as appropriate. This is a transient
-            state, the camera device may skip reporting this state in
-            capture result.</notes></value>
-          </enum>
-          <description>Current state of the auto-exposure (AE) algorithm.</description>
-          <details>Switching between or enabling AE modes (android.control.aeMode) always
-          resets the AE state to INACTIVE. Similarly, switching between android.control.mode,
-          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
-          the algorithm states to INACTIVE.
-
-          The camera device can do several state transitions between two results, if it is
-          allowed by the state transition table. For example: INACTIVE may never actually be
-          seen in a result.
-
-          The state in the result is the state for this image (in sync with this image): if
-          AE state becomes CONVERGED, then the image data associated with this result should
-          be good to use.
-
-          Below are state transition tables for different AE modes.
-
-            State       | Transition Cause | New State | Notes
-          :------------:|:----------------:|:---------:|:-----------------------:
-          INACTIVE      |                  | INACTIVE  | Camera device auto exposure algorithm is disabled
-
-          When android.control.aeMode is AE_MODE_ON_*:
-
-            State        | Transition Cause                             | New State      | Notes
-          :-------------:|:--------------------------------------------:|:--------------:|:-----------------:
-          INACTIVE       | Camera device initiates AE scan              | SEARCHING      | Values changing
-          INACTIVE       | android.control.aeLock is ON                 | LOCKED         | Values locked
-          SEARCHING      | Camera device finishes AE scan               | CONVERGED      | Good values, not changing
-          SEARCHING      | Camera device finishes AE scan               | FLASH_REQUIRED | Converged but too dark w/o flash
-          SEARCHING      | android.control.aeLock is ON                 | LOCKED         | Values locked
-          CONVERGED      | Camera device initiates AE scan              | SEARCHING      | Values changing
-          CONVERGED      | android.control.aeLock is ON                 | LOCKED         | Values locked
-          FLASH_REQUIRED | Camera device initiates AE scan              | SEARCHING      | Values changing
-          FLASH_REQUIRED | android.control.aeLock is ON                 | LOCKED         | Values locked
-          LOCKED         | android.control.aeLock is OFF                | SEARCHING      | Values not good after unlock
-          LOCKED         | android.control.aeLock is OFF                | CONVERGED      | Values good after unlock
-          LOCKED         | android.control.aeLock is OFF                | FLASH_REQUIRED | Exposure good, but too dark
-          PRECAPTURE     | Sequence done. android.control.aeLock is OFF | CONVERGED      | Ready for high-quality capture
-          PRECAPTURE     | Sequence done. android.control.aeLock is ON  | LOCKED         | Ready for high-quality capture
-          LOCKED         | aeLock is ON and aePrecaptureTrigger is START | LOCKED        | Precapture trigger is ignored when AE is already locked
-          LOCKED         | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED        | Precapture trigger is ignored when AE is already locked
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE     | Start AE precapture metering sequence
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE       | Currently active precapture metering sequence is canceled
-
-          For the above table, the camera device may skip reporting any state changes that happen
-          without application intervention (i.e. mode switch, trigger, locking). Any state that
-          can be skipped in that manner is called a transient state.
-
-          For example, for above AE modes (AE_MODE_ON_*), in addition to the state transitions
-          listed in above table, it is also legal for the camera device to skip one or more
-          transient states between two results. See below table for examples:
-
-            State        | Transition Cause                                            | New State      | Notes
-          :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------:
-          INACTIVE       | Camera device finished AE scan                              | CONVERGED      | Values are already good, transient states are skipped by camera device.
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED      | Converged after a precapture sequence, transient states are skipped by camera device.
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.
-          Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged    | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
-          CONVERGED      | Camera device finished AE scan                              | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.
-          FLASH_REQUIRED | Camera device finished AE scan                              | CONVERGED      | Converged after a new scan, transient states are skipped by camera device.
-          </details>
-        </entry>
-        <clone entry="android.control.afMode" kind="controls">
-        </clone>
-        <clone entry="android.control.afRegions" kind="controls">
-        </clone>
-        <clone entry="android.control.afTrigger" kind="controls">
-        </clone>
-        <entry name="afState" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>INACTIVE
-            <notes>AF is off or has not yet tried to scan/been asked
-            to scan.
-
-            When a camera device is opened, it starts in this
-            state. This is a transient state, the camera device may
-            skip reporting this state in capture
-            result.</notes></value>
-            <value>PASSIVE_SCAN
-            <notes>AF is currently performing an AF scan initiated by the
-            camera device in a continuous autofocus mode.
-
-            Only used by CONTINUOUS_* AF modes. This is a transient
-            state, the camera device may skip reporting this state in
-            capture result.</notes></value>
-            <value>PASSIVE_FOCUSED
-            <notes>AF currently believes it is in focus, but may
-            restart scanning at any time.
-
-            Only used by CONTINUOUS_* AF modes. This is a transient
-            state, the camera device may skip reporting this state in
-            capture result.</notes></value>
-            <value>ACTIVE_SCAN
-            <notes>AF is performing an AF scan because it was
-            triggered by AF trigger.
-
-            Only used by AUTO or MACRO AF modes. This is a transient
-            state, the camera device may skip reporting this state in
-            capture result.</notes></value>
-            <value>FOCUSED_LOCKED
-            <notes>AF believes it is focused correctly and has locked
-            focus.
-
-            This state is reached only after an explicit START AF trigger has been
-            sent (android.control.afTrigger), when good focus has been obtained.
-
-            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
-            a new AF trigger is sent to the camera device (android.control.afTrigger).
-            </notes></value>
-            <value>NOT_FOCUSED_LOCKED
-            <notes>AF has failed to focus successfully and has locked
-            focus.
-
-            This state is reached only after an explicit START AF trigger has been
-            sent (android.control.afTrigger), when good focus cannot be obtained.
-
-            The lens will remain stationary until the AF mode (android.control.afMode) is changed or
-            a new AF trigger is sent to the camera device (android.control.afTrigger).
-            </notes></value>
-            <value>PASSIVE_UNFOCUSED
-            <notes>AF finished a passive scan without finding focus,
-            and may restart scanning at any time.
-
-            Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
-            device may skip reporting this state in capture result.
-
-            LEGACY camera devices do not support this state. When a passive
-            scan has finished, it will always go to PASSIVE_FOCUSED.
-            </notes></value>
-          </enum>
-          <description>Current state of auto-focus (AF) algorithm.</description>
-          <details>
-          Switching between or enabling AF modes (android.control.afMode) always
-          resets the AF state to INACTIVE. Similarly, switching between android.control.mode,
-          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
-          the algorithm states to INACTIVE.
-
-          The camera device can do several state transitions between two results, if it is
-          allowed by the state transition table. For example: INACTIVE may never actually be
-          seen in a result.
-
-          The state in the result is the state for this image (in sync with this image): if
-          AF state becomes FOCUSED, then the image data associated with this result should
-          be sharp.
-
-          Below are state transition tables for different AF modes.
-
-          When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF:
-
-            State       | Transition Cause | New State | Notes
-          :------------:|:----------------:|:---------:|:-----------:
-          INACTIVE      |                  | INACTIVE  | Never changes
-
-          When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO:
-
-            State            | Transition Cause | New State          | Notes
-          :-----------------:|:----------------:|:------------------:|:--------------:
-          INACTIVE           | AF_TRIGGER       | ACTIVE_SCAN        | Start AF sweep, Lens now moving
-          ACTIVE_SCAN        | AF sweep done    | FOCUSED_LOCKED     | Focused, Lens now locked
-          ACTIVE_SCAN        | AF sweep done    | NOT_FOCUSED_LOCKED | Not focused, Lens now locked
-          ACTIVE_SCAN        | AF_CANCEL        | INACTIVE           | Cancel/reset AF, Lens now locked
-          FOCUSED_LOCKED     | AF_CANCEL        | INACTIVE           | Cancel/reset AF
-          FOCUSED_LOCKED     | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
-          NOT_FOCUSED_LOCKED | AF_CANCEL        | INACTIVE           | Cancel/reset AF
-          NOT_FOCUSED_LOCKED | AF_TRIGGER       | ACTIVE_SCAN        | Start new sweep, Lens now moving
-          Any state          | Mode change      | INACTIVE           |
-
-          For the above table, the camera device may skip reporting any state changes that happen
-          without application intervention (i.e. mode switch, trigger, locking). Any state that
-          can be skipped in that manner is called a transient state.
-
-          For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
-          state transitions listed in above table, it is also legal for the camera device to skip
-          one or more transient states between two results. See below table for examples:
-
-            State            | Transition Cause | New State          | Notes
-          :-----------------:|:----------------:|:------------------:|:--------------:
-          INACTIVE           | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
-          INACTIVE           | AF_TRIGGER       | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked.
-          FOCUSED_LOCKED     | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is already good or good after a scan, lens is now locked.
-          NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
-
-
-          When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO:
-
-            State            | Transition Cause                    | New State          | Notes
-          :-----------------:|:-----------------------------------:|:------------------:|:--------------:
-          INACTIVE           | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          INACTIVE           | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
-          PASSIVE_SCAN       | Camera device completes current scan| PASSIVE_FOCUSED    | End AF scan, Lens now locked
-          PASSIVE_SCAN       | Camera device fails current scan    | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
-          PASSIVE_SCAN       | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, if focus is good. Lens now locked
-          PASSIVE_SCAN       | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked
-          PASSIVE_SCAN       | AF_CANCEL                           | INACTIVE           | Reset lens position, Lens now locked
-          PASSIVE_FOCUSED    | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          PASSIVE_UNFOCUSED  | Camera device initiates new scan    | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          PASSIVE_FOCUSED    | AF_TRIGGER                          | FOCUSED_LOCKED     | Immediate transition, lens now locked
-          PASSIVE_UNFOCUSED  | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked
-          FOCUSED_LOCKED     | AF_TRIGGER                          | FOCUSED_LOCKED     | No effect
-          FOCUSED_LOCKED     | AF_CANCEL                           | INACTIVE           | Restart AF scan
-          NOT_FOCUSED_LOCKED | AF_TRIGGER                          | NOT_FOCUSED_LOCKED | No effect
-          NOT_FOCUSED_LOCKED | AF_CANCEL                           | INACTIVE           | Restart AF scan
-
-          When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE:
-
-            State            | Transition Cause                     | New State          | Notes
-          :-----------------:|:------------------------------------:|:------------------:|:--------------:
-          INACTIVE           | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          INACTIVE           | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | AF state query, Lens now locked
-          PASSIVE_SCAN       | Camera device completes current scan | PASSIVE_FOCUSED    | End AF scan, Lens now locked
-          PASSIVE_SCAN       | Camera device fails current scan     | PASSIVE_UNFOCUSED  | End AF scan, Lens now locked
-          PASSIVE_SCAN       | AF_TRIGGER                           | FOCUSED_LOCKED     | Eventual transition once the focus is good. Lens now locked
-          PASSIVE_SCAN       | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked
-          PASSIVE_SCAN       | AF_CANCEL                            | INACTIVE           | Reset lens position, Lens now locked
-          PASSIVE_FOCUSED    | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          PASSIVE_UNFOCUSED  | Camera device initiates new scan     | PASSIVE_SCAN       | Start AF scan, Lens now moving
-          PASSIVE_FOCUSED    | AF_TRIGGER                           | FOCUSED_LOCKED     | Immediate trans. Lens now locked
-          PASSIVE_UNFOCUSED  | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked
-          FOCUSED_LOCKED     | AF_TRIGGER                           | FOCUSED_LOCKED     | No effect
-          FOCUSED_LOCKED     | AF_CANCEL                            | INACTIVE           | Restart AF scan
-          NOT_FOCUSED_LOCKED | AF_TRIGGER                           | NOT_FOCUSED_LOCKED | No effect
-          NOT_FOCUSED_LOCKED | AF_CANCEL                            | INACTIVE           | Restart AF scan
-
-          When switch between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
-          (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
-          camera device. When a trigger is included in a mode switch request, the trigger
-          will be evaluated in the context of the new mode in the request.
-          See below table for examples:
-
-            State      | Transition Cause                       | New State                                | Notes
-          :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------:
-          any state    | CAF-->AUTO mode switch                 | INACTIVE                                 | Mode switch without trigger, initial state must be INACTIVE
-          any state    | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE   | Mode switch with trigger, INACTIVE is skipped
-          any state    | AUTO-->CAF mode switch                 | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped
-          </details>
-        </entry>
-        <entry name="afTriggerId" type="int32" visibility="system" deprecated="true">
-          <description>The ID sent with the latest
-          CAMERA2_TRIGGER_AUTOFOCUS call</description>
-          <details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
-          received yet by HAL. Always updated even if AF algorithm
-          ignores the trigger</details>
-        </entry>
-        <clone entry="android.control.awbLock" kind="controls">
-        </clone>
-        <clone entry="android.control.awbMode" kind="controls">
-        </clone>
-        <clone entry="android.control.awbRegions" kind="controls">
-        </clone>
-        <clone entry="android.control.captureIntent" kind="controls">
-        </clone>
-        <entry name="awbState" type="byte" visibility="public" enum="true"
-               hwlevel="limited">
-          <enum>
-            <value>INACTIVE
-            <notes>AWB is not in auto mode, or has not yet started metering.
-
-            When a camera device is opened, it starts in this
-            state. This is a transient state, the camera device may
-            skip reporting this state in capture
-            result.</notes></value>
-            <value>SEARCHING
-            <notes>AWB doesn't yet have a good set of control
-            values for the current scene.
-
-            This is a transient state, the camera device
-            may skip reporting this state in capture result.</notes></value>
-            <value>CONVERGED
-            <notes>AWB has a good set of control values for the
-            current scene.</notes></value>
-            <value>LOCKED
-            <notes>AWB has been locked.
-            </notes></value>
-          </enum>
-          <description>Current state of auto-white balance (AWB) algorithm.</description>
-          <details>Switching between or enabling AWB modes (android.control.awbMode) always
-          resets the AWB state to INACTIVE. Similarly, switching between android.control.mode,
-          or android.control.sceneMode if `android.control.mode == USE_SCENE_MODE` resets all
-          the algorithm states to INACTIVE.
-
-          The camera device can do several state transitions between two results, if it is
-          allowed by the state transition table. So INACTIVE may never actually be seen in
-          a result.
-
-          The state in the result is the state for this image (in sync with this image): if
-          AWB state becomes CONVERGED, then the image data associated with this result should
-          be good to use.
-
-          Below are state transition tables for different AWB modes.
-
-          When `android.control.awbMode != AWB_MODE_AUTO`:
-
-            State       | Transition Cause | New State | Notes
-          :------------:|:----------------:|:---------:|:-----------------------:
-          INACTIVE      |                  |INACTIVE   |Camera device auto white balance algorithm is disabled
-
-          When android.control.awbMode is AWB_MODE_AUTO:
-
-            State        | Transition Cause                 | New State     | Notes
-          :-------------:|:--------------------------------:|:-------------:|:-----------------:
-          INACTIVE       | Camera device initiates AWB scan | SEARCHING     | Values changing
-          INACTIVE       | android.control.awbLock is ON    | LOCKED        | Values locked
-          SEARCHING      | Camera device finishes AWB scan  | CONVERGED     | Good values, not changing
-          SEARCHING      | android.control.awbLock is ON    | LOCKED        | Values locked
-          CONVERGED      | Camera device initiates AWB scan | SEARCHING     | Values changing
-          CONVERGED      | android.control.awbLock is ON    | LOCKED        | Values locked
-          LOCKED         | android.control.awbLock is OFF   | SEARCHING     | Values not good after unlock
-
-          For the above table, the camera device may skip reporting any state changes that happen
-          without application intervention (i.e. mode switch, trigger, locking). Any state that
-          can be skipped in that manner is called a transient state.
-
-          For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
-          listed in above table, it is also legal for the camera device to skip one or more
-          transient states between two results. See below table for examples:
-
-            State        | Transition Cause                 | New State     | Notes
-          :-------------:|:--------------------------------:|:-------------:|:-----------------:
-          INACTIVE       | Camera device finished AWB scan  | CONVERGED     | Values are already good, transient states are skipped by camera device.
-          LOCKED         | android.control.awbLock is OFF   | CONVERGED     | Values good after unlock, transient states are skipped by camera device.
-          </details>
-        </entry>
-        <clone entry="android.control.effectMode" kind="controls">
-        </clone>
-        <clone entry="android.control.mode" kind="controls">
-        </clone>
-        <clone entry="android.control.sceneMode" kind="controls">
-        </clone>
-        <clone entry="android.control.videoStabilizationMode" kind="controls">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden"
-               container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited">
-          <array>
-            <size>5</size>
-            <size>n</size>
-          </array>
-          <description>
-          List of available high speed video size, fps range and max batch size configurations
-          supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).
-          </description>
-          <range>
-          For each configuration, the fps_max &gt;= 120fps.
-          </range>
-          <details>
-          When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities,
-          this metadata will list the supported high speed video size, fps range and max batch size
-          configurations. All the sizes listed in this configuration will be a subset of the sizes
-          reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes}
-          for processed non-stalling formats.
-
-          For the high speed video use case, the application must
-          select the video size and fps range from this metadata to configure the recording and
-          preview streams and setup the recording requests. For example, if the application intends
-          to do high speed recording, it can select the maximum size reported by this metadata to
-          configure output streams. Once the size is selected, application can filter this metadata
-          by selected size and get the supported fps ranges, and use these fps ranges to setup the
-          recording requests. Note that for the use case of multiple output streams, application
-          must select one unique size from this metadata to use (e.g., preview and recording streams
-          must have the same size). Otherwise, the high speed capture session creation will fail.
-
-          The min and max fps will be multiple times of 30fps.
-
-          High speed video streaming extends significant performance pressure to camera hardware,
-          to achieve efficient high speed streaming, the camera device may have to aggregate
-          multiple frames together and send to camera device for processing where the request
-          controls are same for all the frames in this batch. Max batch size indicates
-          the max possible number of frames the camera device will group together for this high
-          speed stream configuration. This max batch size will be used to generate a high speed
-          recording request list by
-          {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
-          The max batch size for each configuration will satisfy below conditions:
-
-          * Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
-          if max_fps is 300, max batch size will only be 1, 2, 5, or 10.
-          * The camera device may choose smaller internal batch size for each configuration, but
-          the actual batch size will be a divisor of max batch size. For example, if the max batch
-          size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.
-          * The max batch size in each configuration entry must be no larger than 32.
-
-          The camera device doesn't have to support batch mode to achieve high speed video recording,
-          in such case, batch_size_max will be reported as 1 in each configuration entry.
-
-          This fps ranges in this configuration list can only be used to create requests
-          that are submitted to a high speed camera capture session created by
-          {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
-          The fps ranges reported in this metadata must not be used to setup capture requests for
-          normal capture session, or it will cause request error.
-          </details>
-          <hal_details>
-          All the sizes listed in this configuration will be a subset of the sizes reported by
-          android.scaler.availableStreamConfigurations for processed non-stalling output formats.
-          Note that for all high speed video configurations, HAL must be able to support a minimum
-          of two streams, though the application might choose to configure just one stream.
-
-          The HAL may support multiple sensor modes for high speed outputs, for example, 120fps
-          sensor mode and 120fps recording, 240fps sensor mode for 240fps recording. The application
-          usually starts preview first, then starts recording. To avoid sensor mode switch caused
-          stutter when starting recording as much as possible, the application may want to ensure
-          the same sensor mode is used for preview and recording. Therefore, the HAL must advertise
-          the variable fps range [30, fps_max] for each fixed fps range in this configuration list.
-          For example, if the HAL advertises [120, 120] and [240, 240], the HAL must also advertise
-          [30, 120] and [30, 240] for each configuration. In doing so, if the application intends to
-          do 120fps recording, it can select [30, 120] to start preview, and [120, 120] to start
-          recording. For these variable fps ranges, it's up to the HAL to decide the actual fps
-          values that are suitable for smooth preview streaming. If the HAL sees different max_fps
-          values that fall into different sensor modes in a sequence of requests, the HAL must
-          switch the sensor mode as quickly as possible to minimize the mode switch caused stutter.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-        <entry name="aeLockAvailable" type="byte" visibility="public" enum="true"
-               typedef="boolean" hwlevel="legacy">
-          <enum>
-            <value>FALSE</value>
-            <value>TRUE</value>
-          </enum>
-          <description>Whether the camera device supports android.control.aeLock</description>
-          <details>
-              Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
-              list `true`. This includes FULL devices.
-          </details>
-          <tag id="BC"/>
-        </entry>
-        <entry name="awbLockAvailable" type="byte" visibility="public" enum="true"
-               typedef="boolean" hwlevel="legacy">
-          <enum>
-            <value>FALSE</value>
-            <value>TRUE</value>
-          </enum>
-          <description>Whether the camera device supports android.control.awbLock</description>
-          <details>
-              Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
-              always list `true`. This includes FULL devices.
-          </details>
-          <tag id="BC"/>
-        </entry>
-        <entry name="availableModes" type="byte" visibility="public"
-            type_notes="List of enums (android.control.mode)." container="array"
-            typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of control modes for android.control.mode that are supported by this camera
-          device.
-          </description>
-          <range>Any value listed in android.control.mode</range>
-          <details>
-              This list contains control modes that can be set for the camera device.
-              LEGACY mode devices will always support AUTO mode. LIMITED and FULL
-              devices will always support OFF, AUTO modes.
-          </details>
-        </entry>
-        <entry name="postRawSensitivityBoostRange" type="int32" visibility="public"
-            type_notes="Range of supported post RAW sensitivity boosts"
-            container="array" typedef="rangeInt">
-          <array>
-            <size>2</size>
-          </array>
-          <description>Range of boosts for android.control.postRawSensitivityBoost supported
-            by this camera device.
-          </description>
-          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
-          <details>
-            Devices that support post RAW sensitivity boost will advertise
-            android.control.postRawSensitivityBoost key for controlling
-            post RAW sensitivity boost.
-
-            This key will be `null` for devices that do not support any RAW format
-            outputs. For devices that do support RAW format outputs, this key will
-            always be present, and if a device does not support post RAW sensitivity
-            boost, it will list `(100, 100)` in this key.
-          </details>
-          <hal_details>
-             This key is added in HAL3.4. For HAL3.3 or earlier devices, camera framework will
-             generate this key as `(100, 100)` if device supports any of RAW output formats.
-             All HAL3.4 and above devices should list this key if device supports any of RAW
-             output formats.
-          </hal_details>
-        </entry>
-      </static>
-      <controls>
-        <entry name="postRawSensitivityBoost" type="int32" visibility="public">
-          <description>The amount of additional sensitivity boost applied to output images
-             after RAW sensor data is captured.
-          </description>
-          <units>ISO arithmetic units, the same as android.sensor.sensitivity</units>
-          <range>android.control.postRawSensitivityBoostRange</range>
-          <details>
-          Some camera devices support additional digital sensitivity boosting in the
-          camera processing pipeline after sensor RAW image is captured.
-          Such a boost will be applied to YUV/JPEG format output images but will not
-          have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.
-
-          This key will be `null` for devices that do not support any RAW format
-          outputs. For devices that do support RAW format outputs, this key will
-          always be present, and if a device does not support post RAW sensitivity
-          boost, it will list `100` in this key.
-
-          If the camera device cannot apply the exact boost requested, it will reduce the
-          boost to the nearest supported value.
-          The final boost value used will be available in the output capture result.
-
-          For devices that support post RAW sensitivity boost, the YUV/JPEG output images
-          of such device will have the total sensitivity of
-          `android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`
-          The sensitivity of RAW format images will always be `android.sensor.sensitivity`
-
-          This control is only effective if android.control.aeMode or android.control.mode is set to
-          OFF; otherwise the auto-exposure algorithm will override this value.
-          </details>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.control.postRawSensitivityBoost" kind="controls">
-        </clone>
-      </dynamic>
-      <controls>
-        <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean">
-          <enum>
-            <value>FALSE
-            <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured
-              after previous requests.</notes></value>
-            <value>TRUE
-            <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be
-              captured before previous requests.</notes></value>
-          </enum>
-          <description>Allow camera device to enable zero-shutter-lag mode for requests with
-            android.control.captureIntent == STILL_CAPTURE.
-          </description>
-          <details>
-          If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with
-          STILL_CAPTURE capture intent. The camera device may use images captured in the past to
-          produce output images for a zero-shutter-lag request. The result metadata including the
-          android.sensor.timestamp reflects the source frames used to produce output images.
-          Therefore, the contents of the output images and the result metadata may be out of order
-          compared to previous regular requests. enableZsl does not affect requests with other
-          capture intents.
-
-          For example, when requests are submitted in the following order:
-            Request A: enableZsl is `true`, android.control.captureIntent is PREVIEW
-            Request B: enableZsl is `true`, android.control.captureIntent is STILL_CAPTURE
-
-          The output images for request B may have contents captured before the output images for
-          request A, and the result metadata for request B may be older than the result metadata for
-          request A.
-
-          Note that when enableZsl is `true`, it is not guaranteed to get output images captured in the
-          past for requests with STILL_CAPTURE capture intent.
-
-          For applications targeting SDK versions O and newer, the value of enableZsl in
-          TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always
-          `false` if present.
-
-          For applications targeting SDK versions older than O, the value of enableZsl in all
-          capture templates is always `false` if present.
-
-          For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
-          </details>
-          <hal_details>
-          It is valid for HAL to produce regular output images for requests with STILL_CAPTURE
-          capture intent.
-          </hal_details>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.control.enableZsl" kind="controls">
-        </clone>
-      </dynamic>
-    </section>
-    <section name="demosaic">
-      <controls>
-        <entry name="mode" type="byte" enum="true">
-          <enum>
-            <value>FAST
-            <notes>Minimal or no slowdown of frame rate compared to
-            Bayer RAW output.</notes></value>
-            <value>HIGH_QUALITY
-            <notes>Improved processing quality but the frame rate might be slowed down
-            relative to raw output.</notes></value>
-          </enum>
-          <description>Controls the quality of the demosaicing
-          processing.</description>
-          <tag id="FUTURE" />
-        </entry>
-      </controls>
-    </section>
-    <section name="edge">
-      <controls>
-        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
-          <enum>
-            <value>OFF
-            <notes>No edge enhancement is applied.</notes></value>
-            <value>FAST
-            <notes>Apply edge enhancement at a quality level that does not slow down frame rate
-            relative to sensor output. It may be the same as OFF if edge enhancement will
-            slow down frame rate relative to sensor.</notes></value>
-            <value>HIGH_QUALITY
-            <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.
-            </notes></value>
-            <value optional="true">ZERO_SHUTTER_LAG
-            <notes>Edge enhancement is applied at different levels for different output streams,
-            based on resolution. Streams at maximum recording resolution (see {@link
-            ACameraDevice_createCaptureSession}) or below have
-            edge enhancement applied, while higher-resolution streams have no edge enhancement
-            applied. The level of edge enhancement for low-resolution streams is tuned so that
-            frame rate is not impacted, and the quality is equal to or better than FAST (since it
-            is only applied to lower-resolution outputs, quality may improve from FAST).
-
-            This mode is intended to be used by applications operating in a zero-shutter-lag mode
-            with YUV or PRIVATE reprocessing, where the application continuously captures
-            high-resolution intermediate buffers into a circular buffer, from which a final image is
-            produced via reprocessing when a user takes a picture.  For such a use case, the
-            high-resolution buffers must not have edge enhancement applied to maximize efficiency of
-            preview and to avoid double-applying enhancement when reprocessed, while low-resolution
-            buffers (used for recording or preview, generally) need edge enhancement applied for
-            reasonable preview quality.
-
-            This mode is guaranteed to be supported by devices that support either the
-            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
-            (android.request.availableCapabilities lists either of those capabilities) and it will
-            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
-            </notes></value>
-          </enum>
-          <description>Operation mode for edge
-          enhancement.</description>
-          <range>android.edge.availableEdgeModes</range>
-          <details>Edge enhancement improves sharpness and details in the captured image. OFF means
-          no enhancement will be applied by the camera device.
-
-          FAST/HIGH_QUALITY both mean camera device determined enhancement
-          will be applied. HIGH_QUALITY mode indicates that the
-          camera device will use the highest-quality enhancement algorithms,
-          even if it slows down capture rate. FAST means the camera device will
-          not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
-          edge enhancement will slow down capture rate. Every output stream will have a similar
-          amount of enhancement applied.
-
-          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
-          buffer of high-resolution images during preview and reprocess image(s) from that buffer
-          into a final capture when triggered by the user. In this mode, the camera device applies
-          edge enhancement to low-resolution streams (below maximum recording resolution) to
-          maximize preview quality, but does not apply edge enhancement to high-resolution streams,
-          since those will be reprocessed later if necessary.
-
-          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
-          device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
-          The camera device may adjust its internal edge enhancement parameters for best
-          image quality based on the android.reprocess.effectiveExposureFactor, if it is set.
-          </details>
-          <hal_details>
-          For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to
-          adjust the internal edge enhancement reduction parameters appropriately to get the best
-          quality images.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </entry>
-        <entry name="strength" type="byte">
-          <description>Control the amount of edge enhancement
-          applied to the images</description>
-          <units>1-10; 10 is maximum sharpening</units>
-          <tag id="FUTURE" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="availableEdgeModes" type="byte" visibility="public"
-               type_notes="list of enums" container="array" typedef="enumList"
-               hwlevel="full">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of edge enhancement modes for android.edge.mode that are supported by this camera
-          device.
-          </description>
-          <range>Any value listed in android.edge.mode</range>
-          <details>
-          Full-capability camera devices must always support OFF; camera devices that support
-          YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
-          list FAST.
-          </details>
-          <hal_details>
-          HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available
-          on the camera device, but the underlying implementation can be the same for both modes.
-          That is, if the highest quality implementation on the camera device does not slow down
-          capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.edge.mode" kind="controls">
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </clone>
-      </dynamic>
-    </section>
-    <section name="flash">
-      <controls>
-        <entry name="firingPower" type="byte">
-          <description>Power for flash firing/torch</description>
-          <units>10 is max power; 0 is no flash. Linear</units>
-          <range>0 - 10</range>
-          <details>Power for snapshot may use a different scale than
-          for torch mode. Only one entry for torch mode will be
-          used</details>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="firingTime" type="int64">
-          <description>Firing time of flash relative to start of
-          exposure</description>
-          <units>nanoseconds</units>
-          <range>0-(exposure time-flash duration)</range>
-          <details>Clamped to (0, exposure time - flash
-          duration).</details>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy">
-          <enum>
-            <value>OFF
-              <notes>
-              Do not fire the flash for this capture.
-              </notes>
-            </value>
-            <value>SINGLE
-              <notes>
-              If the flash is available and charged, fire flash
-              for this capture.
-              </notes>
-            </value>
-            <value>TORCH
-              <notes>
-              Transition flash to continuously on.
-              </notes>
-            </value>
-          </enum>
-          <description>The desired mode for the camera device's flash control.</description>
-          <details>
-          This control is only effective when flash unit is available
-          (`android.flash.info.available == true`).
-
-          When this control is used, the android.control.aeMode must be set to ON or OFF.
-          Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
-          ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.
-
-          When set to OFF, the camera device will not fire flash for this capture.
-
-          When set to SINGLE, the camera device will fire flash regardless of the camera
-          device's auto-exposure routine's result. When used in still capture case, this
-          control should be used along with auto-exposure (AE) precapture metering sequence
-          (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed.
-
-          When set to TORCH, the flash will be on continuously. This mode can be used
-          for use cases such as preview, auto-focus assist, still capture, or video recording.
-
-          The flash status will be reported by android.flash.state in the capture result metadata.
-          </details>
-          <tag id="BC" />
-        </entry>
-      </controls>
-      <static>
-        <namespace name="info">
-          <entry name="available" type="byte" visibility="public" enum="true"
-                 typedef="boolean" hwlevel="legacy">
-            <enum>
-              <value>FALSE</value>
-              <value>TRUE</value>
-            </enum>
-            <description>Whether this camera device has a
-            flash unit.</description>
-            <details>
-            Will be `false` if no flash is available.
-
-            If there is no flash unit, none of the flash controls do
-            anything.</details>
-            <tag id="BC" />
-          </entry>
-          <entry name="chargeDuration" type="int64">
-            <description>Time taken before flash can fire
-            again</description>
-            <units>nanoseconds</units>
-            <range>0-1e9</range>
-            <details>1 second too long/too short for recharge? Should
-            this be power-dependent?</details>
-            <tag id="FUTURE" />
-          </entry>
-        </namespace>
-        <entry name="colorTemperature" type="byte">
-          <description>The x,y whitepoint of the
-          flash</description>
-          <units>pair of floats</units>
-          <range>0-1 for both</range>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="maxEnergy" type="byte">
-          <description>Max energy output of the flash for a full
-          power single flash</description>
-          <units>lumen-seconds</units>
-          <range>&amp;gt;= 0</range>
-          <tag id="FUTURE" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.flash.firingPower" kind="controls">
-        </clone>
-        <clone entry="android.flash.firingTime" kind="controls">
-        </clone>
-        <clone entry="android.flash.mode" kind="controls"></clone>
-        <entry name="state" type="byte" visibility="public" enum="true"
-               hwlevel="limited">
-          <enum>
-            <value>UNAVAILABLE
-            <notes>No flash on camera.</notes></value>
-            <value>CHARGING
-            <notes>Flash is charging and cannot be fired.</notes></value>
-            <value>READY
-            <notes>Flash is ready to fire.</notes></value>
-            <value>FIRED
-            <notes>Flash fired for this capture.</notes></value>
-            <value>PARTIAL
-            <notes>Flash partially illuminated this frame.
-
-            This is usually due to the next or previous frame having
-            the flash fire, and the flash spilling into this capture
-            due to hardware limitations.</notes></value>
-          </enum>
-          <description>Current state of the flash
-          unit.</description>
-          <details>
-          When the camera device doesn't have flash unit
-          (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE.
-          Other states indicate the current flash status.
-
-          In certain conditions, this will be available on LEGACY devices:
-
-           * Flash-less cameras always return UNAVAILABLE.
-           * Using android.control.aeMode `==` ON_ALWAYS_FLASH
-             will always return FIRED.
-           * Using android.flash.mode `==` TORCH
-             will always return FIRED.
-
-          In all other conditions the state will not be available on
-          LEGACY devices (i.e. it will be `null`).
-          </details>
-        </entry>
-      </dynamic>
-    </section>
-    <section name="hotPixel">
-      <controls>
-        <entry name="mode" type="byte" visibility="public" enum="true">
-          <enum>
-            <value>OFF
-              <notes>
-              No hot pixel correction is applied.
-
-              The frame rate must not be reduced relative to sensor raw output
-              for this option.
-
-              The hotpixel map may be returned in android.statistics.hotPixelMap.
-              </notes>
-            </value>
-            <value>FAST
-              <notes>
-              Hot pixel correction is applied, without reducing frame
-              rate relative to sensor raw output.
-
-              The hotpixel map may be returned in android.statistics.hotPixelMap.
-              </notes>
-            </value>
-            <value>HIGH_QUALITY
-              <notes>
-              High-quality hot pixel correction is applied, at a cost
-              of possibly reduced frame rate relative to sensor raw output.
-
-              The hotpixel map may be returned in android.statistics.hotPixelMap.
-              </notes>
-            </value>
-          </enum>
-          <description>
-          Operational mode for hot pixel correction.
-          </description>
-          <range>android.hotPixel.availableHotPixelModes</range>
-          <details>
-          Hotpixel correction interpolates out, or otherwise removes, pixels
-          that do not accurately measure the incoming light (i.e. pixels that
-          are stuck at an arbitrary value or are oversensitive).
-          </details>
-          <tag id="V1" />
-          <tag id="RAW" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="availableHotPixelModes" type="byte" visibility="public"
-          type_notes="list of enums" container="array" typedef="enumList">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of hot pixel correction modes for android.hotPixel.mode that are supported by this
-          camera device.
-          </description>
-          <range>Any value listed in android.hotPixel.mode</range>
-          <details>
-          FULL mode camera devices will always support FAST.
-          </details>
-          <hal_details>
-          To avoid performance issues, there will be significantly fewer hot
-          pixels than actual pixels on the camera sensor.
-          HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available
-          on the camera device, but the underlying implementation can be the same for both modes.
-          That is, if the highest quality implementation on the camera device does not slow down
-          capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="RAW" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.hotPixel.mode" kind="controls">
-          <tag id="V1" />
-          <tag id="RAW" />
-        </clone>
-      </dynamic>
-    </section>
-    <section name="jpeg">
-      <controls>
-        <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true"
-        typedef="location" hwlevel="legacy">
-          <description>
-          A location object to use when generating image GPS metadata.
-          </description>
-          <details>
-          Setting a location object in a request will include the GPS coordinates of the location
-          into any JPEG images captured based on the request. These coordinates can then be
-          viewed by anyone who receives the JPEG image.
-          </details>
-        </entry>
-        <entry name="gpsCoordinates" type="double" visibility="ndk_public"
-        type_notes="latitude, longitude, altitude. First two in degrees, the third in meters"
-        container="array" hwlevel="legacy">
-          <array>
-            <size>3</size>
-          </array>
-          <description>GPS coordinates to include in output JPEG
-          EXIF.</description>
-          <range>(-180 - 180], [-90,90], [-inf, inf]</range>
-          <tag id="BC" />
-        </entry>
-        <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public"
-               typedef="string" hwlevel="legacy">
-          <description>32 characters describing GPS algorithm to
-          include in EXIF.</description>
-          <units>UTF-8 null-terminated string</units>
-          <tag id="BC" />
-        </entry>
-        <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy">
-          <description>Time GPS fix was made to include in
-          EXIF.</description>
-          <units>UTC in seconds since January 1, 1970</units>
-          <tag id="BC" />
-        </entry>
-        <entry name="orientation" type="int32" visibility="public" hwlevel="legacy">
-          <description>The orientation for a JPEG image.</description>
-          <units>Degrees in multiples of 90</units>
-          <range>0, 90, 180, 270</range>
-          <details>
-          The clockwise rotation angle in degrees, relative to the orientation
-          to the camera, that the JPEG picture needs to be rotated by, to be viewed
-          upright.
-
-          Camera devices may either encode this value into the JPEG EXIF header, or
-          rotate the image data to match this orientation. When the image data is rotated,
-          the thumbnail data will also be rotated.
-
-          Note that this orientation is relative to the orientation of the camera sensor, given
-          by android.sensor.orientation.
-
-          To translate from the device orientation given by the Android sensor APIs, the following
-          sample code may be used:
-
-              private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
-                  if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
-                  int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
-
-                  // Round device orientation to a multiple of 90
-                  deviceOrientation = (deviceOrientation + 45) / 90 * 90;
-
-                  // Reverse device orientation for front-facing cameras
-                  boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
-                  if (facingFront) deviceOrientation = -deviceOrientation;
-
-                  // Calculate desired JPEG orientation relative to camera orientation to make
-                  // the image upright relative to the device orientation
-                  int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
-
-                  return jpegOrientation;
-              }
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="quality" type="byte" visibility="public" hwlevel="legacy">
-          <description>Compression quality of the final JPEG
-          image.</description>
-          <range>1-100; larger is higher quality</range>
-          <details>85-95 is typical usage range.</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy">
-          <description>Compression quality of JPEG
-          thumbnail.</description>
-          <range>1-100; larger is higher quality</range>
-          <tag id="BC" />
-        </entry>
-        <entry name="thumbnailSize" type="int32" visibility="public"
-        container="array" typedef="size" hwlevel="legacy">
-          <array>
-            <size>2</size>
-          </array>
-          <description>Resolution of embedded JPEG thumbnail.</description>
-          <range>android.jpeg.availableThumbnailSizes</range>
-          <details>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail,
-          but the captured JPEG will still be a valid image.
-
-          For best results, when issuing a request for a JPEG image, the thumbnail size selected
-          should have the same aspect ratio as the main JPEG output.
-
-          If the thumbnail image aspect ratio differs from the JPEG primary image aspect
-          ratio, the camera device creates the thumbnail by cropping it from the primary image.
-          For example, if the primary image has 4:3 aspect ratio, the thumbnail image has
-          16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
-          generate the thumbnail image. The thumbnail image will always have a smaller Field
-          Of View (FOV) than the primary image when aspect ratios differ.
-
-          When an android.jpeg.orientation of non-zero degree is requested,
-          the camera device will handle thumbnail rotation in one of the following ways:
-
-          * Set the
-            [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION)
-            and keep jpeg and thumbnail image data unrotated.
-          * Rotate the jpeg and thumbnail image data and not set
-            [EXIF orientation flag](https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION).
-            In this case, LIMITED or FULL hardware level devices will report rotated thumbnail size
-            in capture result, so the width and height will be interchanged if 90 or 270 degree
-            orientation is requested. LEGACY device will always report unrotated thumbnail size.
-          </details>
-          <hal_details>
-          The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail.
-          The cropping must be done on the primary jpeg image rather than the sensor active array.
-          The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't apply to the
-          thumbnail image cropping.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="availableThumbnailSizes" type="int32" visibility="public"
-        container="array" typedef="size" hwlevel="legacy">
-          <array>
-            <size>2</size>
-            <size>n</size>
-          </array>
-          <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this
-          camera device.</description>
-          <details>
-          This list will include at least one non-zero resolution, plus `(0,0)` for indicating no
-          thumbnail should be generated.
-
-          Below conditions will be satisfied for this size list:
-
-          * The sizes will be sorted by increasing pixel area (width x height).
-          If several resolutions have the same area, they will be sorted by increasing width.
-          * The aspect ratio of the largest thumbnail size will be same as the
-          aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
-          The largest size is defined as the size that has the largest pixel area
-          in a given size list.
-          * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
-          one corresponding size that has the same aspect ratio in availableThumbnailSizes,
-          and vice versa.
-          * All non-`(0, 0)` sizes will have non-zero widths and heights.</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="maxSize" type="int32" visibility="system">
-          <description>Maximum size in bytes for the compressed
-          JPEG buffer</description>
-          <range>Must be large enough to fit any JPEG produced by
-          the camera</range>
-          <details>This is used for sizing the gralloc buffers for
-          JPEG</details>
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.jpeg.gpsLocation" kind="controls">
-        </clone>
-        <clone entry="android.jpeg.gpsCoordinates" kind="controls">
-        </clone>
-        <clone entry="android.jpeg.gpsProcessingMethod"
-        kind="controls"></clone>
-        <clone entry="android.jpeg.gpsTimestamp" kind="controls">
-        </clone>
-        <clone entry="android.jpeg.orientation" kind="controls">
-        </clone>
-        <clone entry="android.jpeg.quality" kind="controls">
-        </clone>
-        <entry name="size" type="int32">
-          <description>The size of the compressed JPEG image, in
-          bytes</description>
-          <range>&amp;gt;= 0</range>
-          <details>If no JPEG output is produced for the request,
-          this must be 0.
-
-          Otherwise, this describes the real size of the compressed
-          JPEG image placed in the output stream.  More specifically,
-          if android.jpeg.maxSize = 1000000, and a specific capture
-          has android.jpeg.size = 500000, then the output buffer from
-          the JPEG stream will be 1000000 bytes, of which the first
-          500000 make up the real data.</details>
-          <tag id="FUTURE" />
-        </entry>
-        <clone entry="android.jpeg.thumbnailQuality"
-        kind="controls"></clone>
-        <clone entry="android.jpeg.thumbnailSize" kind="controls">
-        </clone>
-      </dynamic>
-    </section>
-    <section name="lens">
-      <controls>
-        <entry name="aperture" type="float" visibility="public" hwlevel="full">
-          <description>The desired lens aperture size, as a ratio of lens focal length to the
-          effective aperture diameter.</description>
-          <units>The f-number (f/N)</units>
-          <range>android.lens.info.availableApertures</range>
-          <details>Setting this value is only supported on the camera devices that have a variable
-          aperture lens.
-
-          When this is supported and android.control.aeMode is OFF,
-          this can be set along with android.sensor.exposureTime,
-          android.sensor.sensitivity, and android.sensor.frameDuration
-          to achieve manual exposure control.
-
-          The requested aperture value may take several frames to reach the
-          requested value; the camera device will report the current (intermediate)
-          aperture size in capture result metadata while the aperture is changing.
-          While the aperture is still changing, android.lens.state will be set to MOVING.
-
-          When this is supported and android.control.aeMode is one of
-          the ON modes, this will be overridden by the camera device
-          auto-exposure algorithm, the overridden values are then provided
-          back to the user in the corresponding result.</details>
-          <tag id="V1" />
-        </entry>
-        <entry name="filterDensity" type="float" visibility="public" hwlevel="full">
-          <description>
-          The desired setting for the lens neutral density filter(s).
-          </description>
-          <units>Exposure Value (EV)</units>
-          <range>android.lens.info.availableFilterDensities</range>
-          <details>
-          This control will not be supported on most camera devices.
-
-          Lens filters are typically used to lower the amount of light the
-          sensor is exposed to (measured in steps of EV). As used here, an EV
-          step is the standard logarithmic representation, which are
-          non-negative, and inversely proportional to the amount of light
-          hitting the sensor.  For example, setting this to 0 would result
-          in no reduction of the incoming light, and setting this to 2 would
-          mean that the filter is set to reduce incoming light by two stops
-          (allowing 1/4 of the prior amount of light to the sensor).
-
-          It may take several frames before the lens filter density changes
-          to the requested value. While the filter density is still changing,
-          android.lens.state will be set to MOVING.
-          </details>
-          <tag id="V1" />
-        </entry>
-        <entry name="focalLength" type="float" visibility="public" hwlevel="legacy">
-          <description>
-          The desired lens focal length; used for optical zoom.
-          </description>
-          <units>Millimeters</units>
-          <range>android.lens.info.availableFocalLengths</range>
-          <details>
-          This setting controls the physical focal length of the camera
-          device's lens. Changing the focal length changes the field of
-          view of the camera device, and is usually used for optical zoom.
-
-          Like android.lens.focusDistance and android.lens.aperture, this
-          setting won't be applied instantaneously, and it may take several
-          frames before the lens can change to the requested focal length.
-          While the focal length is still changing, android.lens.state will
-          be set to MOVING.
-
-          Optical zoom will not be supported on most devices.
-          </details>
-          <tag id="V1" />
-        </entry>
-        <entry name="focusDistance" type="float" visibility="public" hwlevel="full">
-          <description>Desired distance to plane of sharpest focus,
-          measured from frontmost surface of the lens.</description>
-          <units>See android.lens.info.focusDistanceCalibration for details</units>
-          <range>&amp;gt;= 0</range>
-          <details>
-          This control can be used for setting manual focus, on devices that support
-          the MANUAL_SENSOR capability and have a variable-focus lens (see
-          android.lens.info.minimumFocusDistance).
-
-          A value of `0.0f` means infinity focus. The value set will be clamped to
-          `[0.0f, android.lens.info.minimumFocusDistance]`.
-
-          Like android.lens.focalLength, this setting won't be applied
-          instantaneously, and it may take several frames before the lens
-          can move to the requested focus distance. While the lens is still moving,
-          android.lens.state will be set to MOVING.
-
-          LEGACY devices support at most setting this to `0.0f`
-          for infinity focus.
-          </details>
-          <tag id="BC" />
-          <tag id="V1" />
-        </entry>
-        <entry name="opticalStabilizationMode" type="byte" visibility="public"
-        enum="true" hwlevel="limited">
-          <enum>
-            <value>OFF
-              <notes>Optical stabilization is unavailable.</notes>
-            </value>
-            <value optional="true">ON
-              <notes>Optical stabilization is enabled.</notes>
-            </value>
-          </enum>
-          <description>
-          Sets whether the camera device uses optical image stabilization (OIS)
-          when capturing images.
-          </description>
-          <range>android.lens.info.availableOpticalStabilization</range>
-          <details>
-          OIS is used to compensate for motion blur due to small
-          movements of the camera during capture. Unlike digital image
-          stabilization (android.control.videoStabilizationMode), OIS
-          makes use of mechanical elements to stabilize the camera
-          sensor, and thus allows for longer exposure times before
-          camera shake becomes apparent.
-
-          Switching between different optical stabilization modes may take several
-          frames to initialize; the camera device will report the current mode in
-          capture result metadata. For example, when "ON" mode is requested, the
-          optical stabilization modes in the first several capture results may still
-          be "OFF", and it will become "ON" when the initialization is done.
-
-          If a camera device supports both OIS and digital image stabilization
-          (android.control.videoStabilizationMode), turning both modes on may produce undesirable
-          interaction, so it is recommended not to enable both at the same time.
-
-          Not all devices will support OIS; see
-          android.lens.info.availableOpticalStabilization for
-          available controls.
-          </details>
-          <tag id="V1" />
-        </entry>
-      </controls>
-      <static>
-        <namespace name="info">
-          <entry name="availableApertures" type="float" visibility="public"
-          container="array" hwlevel="full">
-            <array>
-              <size>n</size>
-            </array>
-            <description>List of aperture size values for android.lens.aperture that are
-            supported by this camera device.</description>
-            <units>The aperture f-number</units>
-            <details>If the camera device doesn't support a variable lens aperture,
-            this list will contain only one value, which is the fixed aperture size.
-
-            If the camera device supports a variable aperture, the aperture values
-            in this list will be sorted in ascending order.</details>
-            <tag id="V1" />
-          </entry>
-          <entry name="availableFilterDensities" type="float" visibility="public"
-          container="array" hwlevel="full">
-            <array>
-              <size>n</size>
-            </array>
-            <description>
-            List of neutral density filter values for
-            android.lens.filterDensity that are supported by this camera device.
-            </description>
-            <units>Exposure value (EV)</units>
-            <range>
-            Values are &amp;gt;= 0
-            </range>
-            <details>
-            If a neutral density filter is not supported by this camera device,
-            this list will contain only 0. Otherwise, this list will include every
-            filter density supported by the camera device, in ascending order.
-            </details>
-            <tag id="V1" />
-          </entry>
-          <entry name="availableFocalLengths" type="float" visibility="public"
-          type_notes="The list of available focal lengths"
-          container="array" hwlevel="legacy">
-            <array>
-              <size>n</size>
-            </array>
-            <description>
-            List of focal lengths for android.lens.focalLength that are supported by this camera
-            device.
-            </description>
-            <units>Millimeters</units>
-            <range>
-            Values are &amp;gt; 0
-            </range>
-            <details>
-            If optical zoom is not supported, this list will only contain
-            a single value corresponding to the fixed focal length of the
-            device. Otherwise, this list will include every focal length supported
-            by the camera device, in ascending order.
-            </details>
-            <tag id="BC" />
-            <tag id="V1" />
-          </entry>
-          <entry name="availableOpticalStabilization" type="byte"
-          visibility="public" type_notes="list of enums" container="array"
-          typedef="enumList" hwlevel="limited">
-            <array>
-              <size>n</size>
-            </array>
-            <description>
-            List of optical image stabilization (OIS) modes for
-            android.lens.opticalStabilizationMode that are supported by this camera device.
-            </description>
-            <range>Any value listed in android.lens.opticalStabilizationMode</range>
-            <details>
-            If OIS is not supported by a given camera device, this list will
-            contain only OFF.
-            </details>
-            <tag id="V1" />
-          </entry>
-          <entry name="hyperfocalDistance" type="float" visibility="public" optional="true"
-                 hwlevel="limited">
-            <description>Hyperfocal distance for this lens.</description>
-            <units>See android.lens.info.focusDistanceCalibration for details</units>
-            <range>If lens is fixed focus, &amp;gt;= 0. If lens has focuser unit, the value is
-            within `(0.0f, android.lens.info.minimumFocusDistance]`</range>
-            <details>
-            If the lens is not fixed focus, the camera device will report this
-            field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED.
-            </details>
-          </entry>
-          <entry name="minimumFocusDistance" type="float" visibility="public" optional="true"
-                 hwlevel="limited">
-            <description>Shortest distance from frontmost surface
-            of the lens that can be brought into sharp focus.</description>
-            <units>See android.lens.info.focusDistanceCalibration for details</units>
-            <range>&amp;gt;= 0</range>
-            <details>If the lens is fixed-focus, this will be
-            0.</details>
-            <hal_details>Mandatory for FULL devices; LIMITED devices
-            must always set this value to 0 for fixed-focus; and may omit
-            the minimum focus distance otherwise.
-
-            This field is also mandatory for all devices advertising
-            the MANUAL_SENSOR capability.</hal_details>
-            <tag id="V1" />
-          </entry>
-          <entry name="shadingMapSize" type="int32" visibility="ndk_public"
-                 type_notes="width and height (N, M) of lens shading map provided by the camera device."
-                 container="array" typedef="size" hwlevel="full">
-            <array>
-              <size>2</size>
-            </array>
-            <description>Dimensions of lens shading map.</description>
-            <range>Both values &amp;gt;= 1</range>
-            <details>
-            The map should be on the order of 30-40 rows and columns, and
-            must be smaller than 64x64.
-            </details>
-            <tag id="V1" />
-          </entry>
-          <entry name="focusDistanceCalibration" type="byte" visibility="public"
-                 enum="true" hwlevel="limited">
-            <enum>
-              <value>UNCALIBRATED
-                <notes>
-                The lens focus distance is not accurate, and the units used for
-                android.lens.focusDistance do not correspond to any physical units.
-
-                Setting the lens to the same focus distance on separate occasions may
-                result in a different real focus distance, depending on factors such
-                as the orientation of the device, the age of the focusing mechanism,
-                and the device temperature. The focus distance value will still be
-                in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0
-                represents the farthest focus.
-                </notes>
-              </value>
-              <value>APPROXIMATE
-                <notes>
-                The lens focus distance is measured in diopters.
-
-                However, setting the lens to the same focus distance
-                on separate occasions may result in a different real
-                focus distance, depending on factors such as the
-                orientation of the device, the age of the focusing
-                mechanism, and the device temperature.
-                </notes>
-              </value>
-              <value>CALIBRATED
-                <notes>
-                The lens focus distance is measured in diopters, and
-                is calibrated.
-
-                The lens mechanism is calibrated so that setting the
-                same focus distance is repeatable on multiple
-                occasions with good accuracy, and the focus distance
-                corresponds to the real physical distance to the plane
-                of best focus.
-                </notes>
-              </value>
-            </enum>
-            <description>The lens focus distance calibration quality.</description>
-            <details>
-            The lens focus distance calibration quality determines the reliability of
-            focus related metadata entries, i.e. android.lens.focusDistance,
-            android.lens.focusRange, android.lens.info.hyperfocalDistance, and
-            android.lens.info.minimumFocusDistance.
-
-            APPROXIMATE and CALIBRATED devices report the focus metadata in
-            units of diopters (1/meter), so `0.0f` represents focusing at infinity,
-            and increasing positive numbers represent focusing closer and closer
-            to the camera device. The focus distance control also uses diopters
-            on these devices.
-
-            UNCALIBRATED devices do not use units that are directly comparable
-            to any real physical measurement, but `0.0f` still represents farthest
-            focus, and android.lens.info.minimumFocusDistance represents the
-            nearest focus the device can achieve.
-            </details>
-            <hal_details>
-            For devices that advertise APPROXIMATE quality or higher, diopters 0 (infinity
-            focus) must work. When autofocus is disabled (android.control.afMode == OFF)
-            and the lens focus distance is set to 0 diopters
-            (android.lens.focusDistance == 0), the lens will move to focus at infinity
-            and is stably focused at infinity even if the device tilts. It may take the
-            lens some time to move; during the move the lens state should be MOVING and
-            the output diopter value should be changing toward 0.
-            </hal_details>
-          <tag id="V1" />
-        </entry>
-        </namespace>
-        <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy">
-          <enum>
-            <value>FRONT
-            <notes>
-              The camera device faces the same direction as the device's screen.
-            </notes></value>
-            <value>BACK
-            <notes>
-              The camera device faces the opposite direction as the device's screen.
-            </notes></value>
-            <value>EXTERNAL
-            <notes>
-              The camera device is an external camera, and has no fixed facing relative to the
-              device's screen.
-            </notes></value>
-          </enum>
-          <description>Direction the camera faces relative to
-          device screen.</description>
-        </entry>
-        <entry name="poseRotation" type="float" visibility="public"
-               container="array">
-          <array>
-            <size>4</size>
-          </array>
-          <description>
-            The orientation of the camera relative to the sensor
-            coordinate system.
-          </description>
-          <units>
-            Quaternion coefficients
-          </units>
-          <details>
-            The four coefficients that describe the quaternion
-            rotation from the Android sensor coordinate system to a
-            camera-aligned coordinate system where the X-axis is
-            aligned with the long side of the image sensor, the Y-axis
-            is aligned with the short side of the image sensor, and
-            the Z-axis is aligned with the optical axis of the sensor.
-
-            To convert from the quaternion coefficients `(x,y,z,w)`
-            to the axis of rotation `(a_x, a_y, a_z)` and rotation
-            amount `theta`, the following formulas can be used:
-
-                theta = 2 * acos(w)
-                a_x = x / sin(theta/2)
-                a_y = y / sin(theta/2)
-                a_z = z / sin(theta/2)
-
-            To create a 3x3 rotation matrix that applies the rotation
-            defined by this quaternion, the following matrix can be
-            used:
-
-                R = [ 1 - 2y^2 - 2z^2,       2xy - 2zw,       2xz + 2yw,
-                           2xy + 2zw, 1 - 2x^2 - 2z^2,       2yz - 2xw,
-                           2xz - 2yw,       2yz + 2xw, 1 - 2x^2 - 2y^2 ]
-
-             This matrix can then be used to apply the rotation to a
-             column vector point with
-
-               `p' = Rp`
-
-             where `p` is in the device sensor coordinate system, and
-             `p'` is in the camera-oriented coordinate system.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="poseTranslation" type="float" visibility="public"
-               container="array">
-          <array>
-            <size>3</size>
-          </array>
-          <description>Position of the camera optical center.</description>
-          <units>Meters</units>
-          <details>
-            The position of the camera device's lens optical center,
-            as a three-dimensional vector `(x,y,z)`, relative to the
-            optical center of the largest camera device facing in the
-            same direction as this camera, in the
-            [Android sensor coordinate axes](https://developer.android.com/reference/android/hardware/SensorEvent.html).
-            Note that only the axis definitions are shared with
-            the sensor coordinate system, but not the origin.
-
-            If this device is the largest or only camera device with a
-            given facing, then this position will be `(0, 0, 0)`; a
-            camera device with a lens optical center located 3 cm from
-            the main sensor along the +X axis (to the right from the
-            user's perspective) will report `(0.03, 0, 0)`.
-
-            To transform a pixel coordinates between two cameras
-            facing the same direction, first the source camera
-            android.lens.radialDistortion must be corrected for.  Then
-            the source camera android.lens.intrinsicCalibration needs
-            to be applied, followed by the android.lens.poseRotation
-            of the source camera, the translation of the source camera
-            relative to the destination camera, the
-            android.lens.poseRotation of the destination camera, and
-            finally the inverse of android.lens.intrinsicCalibration
-            of the destination camera. This obtains a
-            radial-distortion-free coordinate in the destination
-            camera pixel coordinates.
-
-            To compare this against a real image from the destination
-            camera, the destination camera image then needs to be
-            corrected for radial distortion before comparison or
-            sampling.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.lens.aperture" kind="controls">
-          <tag id="V1" />
-        </clone>
-        <clone entry="android.lens.filterDensity" kind="controls">
-          <tag id="V1" />
-        </clone>
-        <clone entry="android.lens.focalLength" kind="controls">
-          <tag id="BC" />
-        </clone>
-        <clone entry="android.lens.focusDistance" kind="controls">
-          <details>Should be zero for fixed-focus cameras</details>
-          <tag id="BC" />
-        </clone>
-        <entry name="focusRange" type="float" visibility="public"
-        type_notes="Range of scene distances that are in focus"
-        container="array" typedef="pairFloatFloat" hwlevel="limited">
-          <array>
-            <size>2</size>
-          </array>
-          <description>The range of scene distances that are in
-          sharp focus (depth of field).</description>
-          <units>A pair of focus distances in diopters: (near,
-          far); see android.lens.info.focusDistanceCalibration for details.</units>
-          <range>&amp;gt;=0</range>
-          <details>If variable focus not supported, can still report
-          fixed depth of field range</details>
-          <tag id="BC" />
-        </entry>
-        <clone entry="android.lens.opticalStabilizationMode"
-        kind="controls">
-          <tag id="V1" />
-        </clone>
-        <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited">
-          <enum>
-            <value>STATIONARY
-              <notes>
-              The lens parameters (android.lens.focalLength, android.lens.focusDistance,
-              android.lens.filterDensity and android.lens.aperture) are not changing.
-              </notes>
-            </value>
-            <value>MOVING
-              <notes>
-              One or several of the lens parameters
-              (android.lens.focalLength, android.lens.focusDistance,
-              android.lens.filterDensity or android.lens.aperture) is
-              currently changing.
-              </notes>
-            </value>
-          </enum>
-          <description>Current lens status.</description>
-          <details>
-          For lens parameters android.lens.focalLength, android.lens.focusDistance,
-          android.lens.filterDensity and android.lens.aperture, when changes are requested,
-          they may take several frames to reach the requested values. This state indicates
-          the current status of the lens parameters.
-
-          When the state is STATIONARY, the lens parameters are not changing. This could be
-          either because the parameters are all fixed, or because the lens has had enough
-          time to reach the most recently-requested values.
-          If all these lens parameters are not changeable for a camera device, as listed below:
-
-          * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means
-          android.lens.focusDistance parameter will always be 0.
-          * Fixed focal length (android.lens.info.availableFocalLengths contains single value),
-          which means the optical zoom is not supported.
-          * No ND filter (android.lens.info.availableFilterDensities contains only 0).
-          * Fixed aperture (android.lens.info.availableApertures contains single value).
-
-          Then this state will always be STATIONARY.
-
-          When the state is MOVING, it indicates that at least one of the lens parameters
-          is changing.
-          </details>
-          <tag id="V1" />
-        </entry>
-        <clone entry="android.lens.poseRotation" kind="static">
-        </clone>
-        <clone entry="android.lens.poseTranslation" kind="static">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="intrinsicCalibration" type="float" visibility="public"
-               container="array">
-          <array>
-            <size>5</size>
-          </array>
-          <description>
-            The parameters for this camera device's intrinsic
-            calibration.
-          </description>
-          <units>
-            Pixels in the
-            android.sensor.info.preCorrectionActiveArraySize
-            coordinate system.
-          </units>
-          <details>
-            The five calibration parameters that describe the
-            transform from camera-centric 3D coordinates to sensor
-            pixel coordinates:
-
-                [f_x, f_y, c_x, c_y, s]
-
-            Where `f_x` and `f_y` are the horizontal and vertical
-            focal lengths, `[c_x, c_y]` is the position of the optical
-            axis, and `s` is a skew parameter for the sensor plane not
-            being aligned with the lens plane.
-
-            These are typically used within a transformation matrix K:
-
-                K = [ f_x,   s, c_x,
-                       0, f_y, c_y,
-                       0,   0,   1 ]
-
-            which can then be combined with the camera pose rotation
-            `R` and translation `t` (android.lens.poseRotation and
-            android.lens.poseTranslation, respectively) to calculate the
-            complete transform from world coordinates to pixel
-            coordinates:
-
-                P = [ K 0   * [ R t
-                     0 1 ]     0 1 ]
-
-            and with `p_w` being a point in the world coordinate system
-            and `p_s` being a point in the camera active pixel array
-            coordinate system, and with the mapping including the
-            homogeneous division by z:
-
-                p_h = (x_h, y_h, z_h) = P p_w
-                p_s = p_h / z_h
-
-            so `[x_s, y_s]` is the pixel coordinates of the world
-            point, `z_s = 1`, and `w_s` is a measurement of disparity
-            (depth) in pixel coordinates.
-
-            Note that the coordinate system for this transform is the
-            android.sensor.info.preCorrectionActiveArraySize system,
-            where `(0,0)` is the top-left of the
-            preCorrectionActiveArraySize rectangle. Once the pose and
-            intrinsic calibration transforms have been applied to a
-            world point, then the android.lens.radialDistortion
-            transform needs to be applied, and the result adjusted to
-            be in the android.sensor.info.activeArraySize coordinate
-            system (where `(0, 0)` is the top-left of the
-            activeArraySize rectangle), to determine the final pixel
-            coordinate of the world point for processed (non-RAW)
-            output buffers.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="radialDistortion" type="float" visibility="public"
-               container="array">
-          <array>
-            <size>6</size>
-          </array>
-          <description>
-            The correction coefficients to correct for this camera device's
-            radial and tangential lens distortion.
-          </description>
-          <units>
-            Unitless coefficients.
-          </units>
-          <details>
-            Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2,
-            kappa_3]` and two tangential distortion coefficients
-            `[kappa_4, kappa_5]` that can be used to correct the
-            lens's geometric distortion with the mapping equations:
-
-                 x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-                       kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
-                 y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
-                       kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
-
-            Here, `[x_c, y_c]` are the coordinates to sample in the
-            input image that correspond to the pixel values in the
-            corrected image at the coordinate `[x_i, y_i]`:
-
-                 correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
-
-            The pixel coordinates are defined in a normalized
-            coordinate system related to the
-            android.lens.intrinsicCalibration calibration fields.
-            Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the
-            lens optical center `[c_x, c_y]`. The maximum magnitudes
-            of both x and y coordinates are normalized to be 1 at the
-            edge further from the optical center, so the range
-            for both dimensions is `-1 &lt;= x &lt;= 1`.
-
-            Finally, `r` represents the radial distance from the
-            optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude
-            is therefore no larger than `|r| &lt;= sqrt(2)`.
-
-            The distortion model used is the Brown-Conrady model.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.lens.intrinsicCalibration" kind="static">
-        </clone>
-        <clone entry="android.lens.radialDistortion" kind="static">
-        </clone>
-      </dynamic>
-    </section>
-    <section name="noiseReduction">
-      <controls>
-        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
-          <enum>
-            <value>OFF
-            <notes>No noise reduction is applied.</notes></value>
-            <value>FAST
-            <notes>Noise reduction is applied without reducing frame rate relative to sensor
-            output. It may be the same as OFF if noise reduction will reduce frame rate
-            relative to sensor.</notes></value>
-            <value>HIGH_QUALITY
-            <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame
-            rate relative to sensor output.</notes></value>
-            <value optional="true">MINIMAL
-            <notes>MINIMAL noise reduction is applied without reducing frame rate relative to
-            sensor output. </notes></value>
-            <value optional="true">ZERO_SHUTTER_LAG
-
-            <notes>Noise reduction is applied at different levels for different output streams,
-            based on resolution. Streams at maximum recording resolution (see {@link
-            ACameraDevice_createCaptureSession}) or below have noise
-            reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
-            noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
-            for low-resolution streams is tuned so that frame rate is not impacted, and the quality
-            is equal to or better than FAST (since it is only applied to lower-resolution outputs,
-            quality may improve from FAST).
-
-            This mode is intended to be used by applications operating in a zero-shutter-lag mode
-            with YUV or PRIVATE reprocessing, where the application continuously captures
-            high-resolution intermediate buffers into a circular buffer, from which a final image is
-            produced via reprocessing when a user takes a picture.  For such a use case, the
-            high-resolution buffers must not have noise reduction applied to maximize efficiency of
-            preview and to avoid over-applying noise filtering when reprocessing, while
-            low-resolution buffers (used for recording or preview, generally) need noise reduction
-            applied for reasonable preview quality.
-
-            This mode is guaranteed to be supported by devices that support either the
-            YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
-            (android.request.availableCapabilities lists either of those capabilities) and it will
-            be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.
-            </notes></value>
-          </enum>
-          <description>Mode of operation for the noise reduction algorithm.</description>
-          <range>android.noiseReduction.availableNoiseReductionModes</range>
-          <details>The noise reduction algorithm attempts to improve image quality by removing
-          excessive noise added by the capture process, especially in dark conditions.
-
-          OFF means no noise reduction will be applied by the camera device, for both raw and
-          YUV domain.
-
-          MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
-          demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is same as OFF.
-          This mode is optional, and may not be supported by all devices. The application should check
-          android.noiseReduction.availableNoiseReductionModes before using it.
-
-          FAST/HIGH_QUALITY both mean camera device determined noise filtering
-          will be applied. HIGH_QUALITY mode indicates that the camera device
-          will use the highest-quality noise filtering algorithms,
-          even if it slows down capture rate. FAST means the camera device will not
-          slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
-          MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
-          Every output stream will have a similar amount of enhancement applied.
-
-          ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
-          buffer of high-resolution images during preview and reprocess image(s) from that buffer
-          into a final capture when triggered by the user. In this mode, the camera device applies
-          noise reduction to low-resolution streams (below maximum recording resolution) to maximize
-          preview quality, but does not apply noise reduction to high-resolution streams, since
-          those will be reprocessed later if necessary.
-
-          For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
-          will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
-          may adjust the noise reduction parameters for best image quality based on the
-          android.reprocess.effectiveExposureFactor if it is set.
-          </details>
-          <hal_details>
-          For YUV_REPROCESSING The HAL can use android.reprocess.effectiveExposureFactor to
-          adjust the internal noise reduction parameters appropriately to get the best quality
-          images.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </entry>
-        <entry name="strength" type="byte">
-          <description>Control the amount of noise reduction
-          applied to the images</description>
-          <units>1-10; 10 is max noise reduction</units>
-          <range>1 - 10</range>
-          <tag id="FUTURE" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="availableNoiseReductionModes" type="byte" visibility="public"
-        type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of noise reduction modes for android.noiseReduction.mode that are supported
-          by this camera device.
-          </description>
-          <range>Any value listed in android.noiseReduction.mode</range>
-          <details>
-          Full-capability camera devices will always support OFF and FAST.
-
-          Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
-          ZERO_SHUTTER_LAG.
-
-          Legacy-capability camera devices will only support FAST mode.
-          </details>
-          <hal_details>
-          HAL must support both FAST and HIGH_QUALITY if noise reduction control is available
-          on the camera device, but the underlying implementation can be the same for both modes.
-          That is, if the highest quality implementation on the camera device does not slow down
-          capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.noiseReduction.mode" kind="controls">
-          <tag id="V1" />
-          <tag id="REPROC" />
-        </clone>
-      </dynamic>
-    </section>
-    <section name="quirks">
-      <static>
-        <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true">
-          <description>If set to 1, the camera service does not
-          scale 'normalized' coordinates with respect to the crop
-          region. This applies to metering input (a{e,f,wb}Region)
-          and output (face rectangles).</description>
-          <details>Normalized coordinates refer to those in the
-          (-1000,1000) range mentioned in the
-          android.hardware.Camera API.
-
-          HAL implementations should instead always use and emit
-          sensor array-relative coordinates for all region data. Does
-          not need to be listed in static metadata. Support will be
-          removed in future versions of camera service.</details>
-        </entry>
-        <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true">
-          <description>If set to 1, then the camera service always
-          switches to FOCUS_MODE_AUTO before issuing a AF
-          trigger.</description>
-          <details>HAL implementations should implement AF trigger
-          modes for AUTO, MACRO, CONTINUOUS_FOCUS, and
-          CONTINUOUS_PICTURE modes instead of using this flag. Does
-          not need to be listed in static metadata. Support will be
-          removed in future versions of camera service.</details>
-        </entry>
-        <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true">
-          <description>If set to 1, the camera service uses
-          CAMERA2_PIXEL_FORMAT_ZSL instead of
-          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
-          shutter lag stream</description>
-          <details>HAL implementations should use gralloc usage flags
-          to determine that a stream will be used for
-          zero-shutter-lag, instead of relying on an explicit
-          format setting. Does not need to be listed in static
-          metadata. Support will be removed in future versions of
-          camera service.</details>
-        </entry>
-        <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true">
-          <description>
-          If set to 1, the HAL will always split result
-          metadata for a single capture into multiple buffers,
-          returned using multiple process_capture_result calls.
-          </description>
-          <details>
-          Does not need to be listed in static
-          metadata. Support for partial results will be reworked in
-          future versions of camera service. This quirk will stop
-          working at that point; DO NOT USE without careful
-          consideration of future support.
-          </details>
-          <hal_details>
-          Refer to `camera3_capture_result::partial_result`
-          for information on how to implement partial results.
-          </hal_details>
-        </entry>
-      </static>
-      <dynamic>
-        <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean">
-          <enum>
-            <value>FINAL
-            <notes>The last or only metadata result buffer
-            for this capture.</notes>
-            </value>
-            <value>PARTIAL
-            <notes>A partial buffer of result metadata for this
-            capture. More result buffers for this capture will be sent
-            by the camera device, the last of which will be marked
-            FINAL.</notes>
-            </value>
-          </enum>
-          <description>
-          Whether a result given to the framework is the
-          final one for the capture, or only a partial that contains a
-          subset of the full set of dynamic metadata
-          values.</description>
-          <range>Optional. Default value is FINAL.</range>
-          <details>
-          The entries in the result metadata buffers for a
-          single capture may not overlap, except for this entry. The
-          FINAL buffers must retain FIFO ordering relative to the
-          requests that generate them, so the FINAL buffer for frame 3 must
-          always be sent to the framework after the FINAL buffer for frame 2, and
-          before the FINAL buffer for frame 4. PARTIAL buffers may be returned
-          in any order relative to other frames, but all PARTIAL buffers for a given
-          capture must arrive before the FINAL buffer for that capture. This entry may
-          only be used by the camera device if quirks.usePartialResult is set to 1.
-          </details>
-          <hal_details>
-          Refer to `camera3_capture_result::partial_result`
-          for information on how to implement partial results.
-          </hal_details>
-        </entry>
-      </dynamic>
-    </section>
-    <section name="request">
-      <controls>
-        <entry name="frameCount" type="int32" visibility="system" deprecated="true">
-          <description>A frame counter set by the framework. Must
-          be maintained unchanged in output frame. This value monotonically
-          increases with every new result (that is, each new result has a unique
-          frameCount value).
-          </description>
-          <units>incrementing integer</units>
-          <range>Any int.</range>
-        </entry>
-        <entry name="id" type="int32" visibility="hidden">
-          <description>An application-specified ID for the current
-          request. Must be maintained unchanged in output
-          frame</description>
-          <units>arbitrary integer assigned by application</units>
-          <range>Any int</range>
-          <tag id="V1" />
-        </entry>
-        <entry name="inputStreams" type="int32" visibility="system" deprecated="true"
-               container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>List which camera reprocess stream is used
-          for the source of reprocessing data.</description>
-          <units>List of camera reprocess stream IDs</units>
-          <range>
-          Typically, only one entry allowed, must be a valid reprocess stream ID.
-          </range>
-          <details>Only meaningful when android.request.type ==
-          REPROCESS. Ignored otherwise</details>
-          <tag id="HAL2" />
-        </entry>
-        <entry name="metadataMode" type="byte" visibility="system"
-               enum="true">
-          <enum>
-            <value>NONE
-            <notes>No metadata should be produced on output, except
-            for application-bound buffer data. If no
-            application-bound streams exist, no frame should be
-            placed in the output frame queue. If such streams
-            exist, a frame should be placed on the output queue
-            with null metadata but with the necessary output buffer
-            information. Timestamp information should still be
-            included with any output stream buffers</notes></value>
-            <value>FULL
-            <notes>All metadata should be produced. Statistics will
-            only be produced if they are separately
-            enabled</notes></value>
-          </enum>
-          <description>How much metadata to produce on
-          output</description>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="outputStreams" type="int32" visibility="system" deprecated="true"
-               container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>Lists which camera output streams image data
-          from this capture must be sent to</description>
-          <units>List of camera stream IDs</units>
-          <range>List must only include streams that have been
-          created</range>
-          <details>If no output streams are listed, then the image
-          data should simply be discarded. The image data must
-          still be captured for metadata and statistics production,
-          and the lens and flash must operate as requested.</details>
-          <tag id="HAL2" />
-        </entry>
-        <entry name="type" type="byte" visibility="system" deprecated="true" enum="true">
-          <enum>
-            <value>CAPTURE
-            <notes>Capture a new image from the imaging hardware,
-            and process it according to the
-            settings</notes></value>
-            <value>REPROCESS
-            <notes>Process previously captured data; the
-            android.request.inputStreams parameter determines the
-            source reprocessing stream. TODO: Mark dynamic metadata
-            needed for reprocessing with [RP]</notes></value>
-          </enum>
-          <description>The type of the request; either CAPTURE or
-          REPROCESS. For HAL3, this tag is redundant.
-          </description>
-          <tag id="HAL2" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>3</size>
-          </array>
-          <description>The maximum numbers of different types of output streams
-          that can be configured and used simultaneously by a camera device.
-          </description>
-          <range>
-          For processed (and stalling) format streams, &amp;gt;= 1.
-
-          For Raw format (either stalling or non-stalling) streams, &amp;gt;= 0.
-
-          For processed (but not stalling) format streams, &amp;gt;= 3
-          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
-          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
-          </range>
-          <details>
-          This is a 3 element tuple that contains the max number of output simultaneous
-          streams for raw sensor, processed (but not stalling), and processed (and stalling)
-          formats respectively. For example, assuming that JPEG is typically a processed and
-          stalling stream, if max raw sensor format output stream number is 1, max YUV streams
-          number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`.
-
-          This lists the upper bound of the number of output streams supported by
-          the camera device. Using more streams simultaneously may require more hardware and
-          CPU resources that will consume more power. The image format for an output stream can
-          be any supported format provided by android.scaler.availableStreamConfigurations.
-          The formats defined in android.scaler.availableStreamConfigurations can be categorized
-          into the 3 stream types as below:
-
-          * Processed (but stalling): any non-RAW format with a stallDurations &amp;gt; 0.
-            Typically {@link AIMAGE_FORMAT_JPEG} format.
-          * Raw formats: {@link AIMAGE_FORMAT_RAW16}, {@link AIMAGE_FORMAT_RAW10}, or
-            {@link AIMAGE_FORMAT_RAW12}.
-          * Processed (but not-stalling): any non-RAW format without a stall duration.
-            Typically {@link AIMAGE_FORMAT_YUV_420_888}.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true"
-               hwlevel="legacy">
-          <description>The maximum numbers of different types of output streams
-          that can be configured and used simultaneously by a camera device
-          for any `RAW` formats.
-          </description>
-          <range>
-          &amp;gt;= 0
-          </range>
-          <details>
-          This value contains the max number of output simultaneous
-          streams from the raw sensor.
-
-          This lists the upper bound of the number of output streams supported by
-          the camera device. Using more streams simultaneously may require more hardware and
-          CPU resources that will consume more power. The image format for this kind of an output stream can
-          be any `RAW` and supported format provided by android.scaler.streamConfigurationMap.
-
-          In particular, a `RAW` format is typically one of:
-
-          * {@link AIMAGE_FORMAT_RAW16}
-          * {@link AIMAGE_FORMAT_RAW10}
-          * {@link AIMAGE_FORMAT_RAW12}
-
-          LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY)
-          never support raw streams.
-          </details>
-        </entry>
-        <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true"
-               hwlevel="legacy">
-          <description>The maximum numbers of different types of output streams
-          that can be configured and used simultaneously by a camera device
-          for any processed (but not-stalling) formats.
-          </description>
-          <range>
-          &amp;gt;= 3
-          for FULL mode devices (`android.info.supportedHardwareLevel == FULL`);
-          &amp;gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`).
-          </range>
-          <details>
-          This value contains the max number of output simultaneous
-          streams for any processed (but not-stalling) formats.
-
-          This lists the upper bound of the number of output streams supported by
-          the camera device. Using more streams simultaneously may require more hardware and
-          CPU resources that will consume more power. The image format for this kind of an output stream can
-          be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
-
-          Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
-          Typically:
-
-          * {@link AIMAGE_FORMAT_YUV_420_888}
-          * Implementation-defined formats, i.e. {@link
-            android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)}
-
-          For full guarantees, query {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
-          processed format -- it will return 0 for a non-stalling stream.
-
-          LEGACY devices will support at least 2 processing/non-stalling streams.
-          </details>
-        </entry>
-        <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true"
-               hwlevel="legacy">
-          <description>The maximum numbers of different types of output streams
-          that can be configured and used simultaneously by a camera device
-          for any processed (and stalling) formats.
-          </description>
-          <range>
-          &amp;gt;= 1
-          </range>
-          <details>
-          This value contains the max number of output simultaneous
-          streams for any processed (and stalling) formats.
-
-          This lists the upper bound of the number of output streams supported by
-          the camera device. Using more streams simultaneously may require more hardware and
-          CPU resources that will consume more power. The image format for this kind of an output stream can
-          be any non-`RAW` and supported format provided by android.scaler.streamConfigurationMap.
-
-          A processed and stalling format is defined as any non-RAW format with a stallDurations
-          &amp;gt; 0.  Typically only the {@link AIMAGE_FORMAT_JPEG} format is a
-          stalling format.
-
-          For full guarantees, query {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a
-          processed format -- it will return a non-0 value for a stalling stream.
-
-          LEGACY devices will support up to 1 processing/stalling stream.
-          </details>
-        </entry>
-        <entry name="maxNumReprocessStreams" type="int32" visibility="system"
-        deprecated="true" container="array">
-          <array>
-            <size>1</size>
-          </array>
-          <description>How many reprocessing streams of any type
-          can be allocated at the same time.</description>
-          <range>&amp;gt;= 0</range>
-          <details>
-          Only used by HAL2.x.
-
-          When set to 0, it means no reprocess stream is supported.
-          </details>
-          <tag id="HAL2" />
-        </entry>
-        <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full">
-          <description>
-          The maximum numbers of any type of input streams
-          that can be configured and used simultaneously by a camera device.
-          </description>
-          <range>
-          0 or 1.
-          </range>
-          <details>When set to 0, it means no input stream is supported.
-
-          The image format for a input stream can be any supported format returned by {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an
-          input stream, there must be at least one output stream configured to receive the
-          reprocessed images.
-
-          When an input stream and some output streams are used in a reprocessing request,
-          only the input buffer will be used to produce these output stream buffers, and a
-          new sensor image will not be captured.
-
-          For example, for Zero Shutter Lag (ZSL) still capture use case, the input
-          stream image format will be PRIVATE, the associated output stream image format
-          should be JPEG.
-          </details>
-          <hal_details>
-          For the reprocessing flow and controls, see
-          hardware/libhardware/include/hardware/camera3.h Section 10 for more details.
-          </hal_details>
-          <tag id="REPROC" />
-        </entry>
-      </static>
-      <dynamic>
-        <entry name="frameCount" type="int32" visibility="hidden" deprecated="true">
-          <description>A frame counter set by the framework. This value monotonically
-          increases with every new result (that is, each new result has a unique
-          frameCount value).</description>
-          <units>count of frames</units>
-          <range>&amp;gt; 0</range>
-          <details>Reset on release()</details>
-        </entry>
-        <clone entry="android.request.id" kind="controls"></clone>
-        <clone entry="android.request.metadataMode"
-        kind="controls"></clone>
-        <clone entry="android.request.outputStreams"
-        kind="controls"></clone>
-        <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy">
-          <description>Specifies the number of pipeline stages the frame went
-          through from when it was exposed to when the final completed result
-          was available to the framework.</description>
-          <range>&amp;lt;= android.request.pipelineMaxDepth</range>
-          <details>Depending on what settings are used in the request, and
-          what streams are configured, the data may undergo less processing,
-          and some pipeline stages skipped.
-
-          See android.request.pipelineMaxDepth for more details.
-          </details>
-          <hal_details>
-          This value must always represent the accurate count of how many
-          pipeline stages were actually used.
-          </hal_details>
-        </entry>
-      </dynamic>
-      <static>
-        <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy">
-          <description>Specifies the number of maximum pipeline stages a frame
-          has to go through from when it's exposed to when it's available
-          to the framework.</description>
-          <details>A typical minimum value for this is 2 (one stage to expose,
-          one stage to readout) from the sensor. The ISP then usually adds
-          its own stages to do custom HW processing. Further stages may be
-          added by SW processing.
-
-          Depending on what settings are used (e.g. YUV, JPEG) and what
-          processing is enabled (e.g. face detection), the actual pipeline
-          depth (specified by android.request.pipelineDepth) may be less than
-          the max pipeline depth.
-
-          A pipeline depth of X stages is equivalent to a pipeline latency of
-          X frame intervals.
-
-          This value will normally be 8 or less, however, for high speed capture session,
-          the max pipeline depth will be up to 8 x size of high speed capture request list.
-          </details>
-          <hal_details>
-          This value should be 4 or less, except for the high speed recording session, where the
-          max batch sizes may be larger than 1.
-          </hal_details>
-        </entry>
-        <entry name="partialResultCount" type="int32" visibility="public" optional="true">
-          <description>Defines how many sub-components
-          a result will be composed of.
-          </description>
-          <range>&amp;gt;= 1</range>
-          <details>In order to combat the pipeline latency, partial results
-          may be delivered to the application layer from the camera device as
-          soon as they are available.
-
-          Optional; defaults to 1. A value of 1 means that partial
-          results are not supported, and only the final TotalCaptureResult will
-          be produced by the camera device.
-
-          A typical use case for this might be: after requesting an
-          auto-focus (AF) lock the new AF state might be available 50%
-          of the way through the pipeline.  The camera device could
-          then immediately dispatch this state via a partial result to
-          the application, and the rest of the metadata via later
-          partial results.
-          </details>
-        </entry>
-        <entry name="availableCapabilities" type="byte" visibility="public"
-          enum="true" container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <enum>
-            <value>BACKWARD_COMPATIBLE
-              <notes>The minimal set of capabilities that every camera
-                device (regardless of android.info.supportedHardwareLevel)
-                supports.
-
-                This capability is listed by all normal devices, and
-                indicates that the camera device has a feature set
-                that's comparable to the baseline requirements for the
-                older android.hardware.Camera API.
-
-                Devices with the DEPTH_OUTPUT capability might not list this
-                capability, indicating that they support only depth measurement,
-                not standard color output.
-              </notes>
-            </value>
-            <value optional="true">MANUAL_SENSOR
-              <notes>
-              The camera device can be manually controlled (3A algorithms such
-              as auto-exposure, and auto-focus can be bypassed).
-              The camera device supports basic manual control of the sensor image
-              acquisition related stages. This means the following controls are
-              guaranteed to be supported:
-
-              * Manual frame duration control
-                  * android.sensor.frameDuration
-                  * android.sensor.info.maxFrameDuration
-              * Manual exposure control
-                  * android.sensor.exposureTime
-                  * android.sensor.info.exposureTimeRange
-              * Manual sensitivity control
-                  * android.sensor.sensitivity
-                  * android.sensor.info.sensitivityRange
-              * Manual lens control (if the lens is adjustable)
-                  * android.lens.*
-              * Manual flash control (if a flash unit is present)
-                  * android.flash.*
-              * Manual black level locking
-                  * android.blackLevel.lock
-              * Auto exposure lock
-                  * android.control.aeLock
-
-              If any of the above 3A algorithms are enabled, then the camera
-              device will accurately report the values applied by 3A in the
-              result.
-
-              A given camera device may also support additional manual sensor controls,
-              but this capability only covers the above list of controls.
-
-              If this is supported, android.scaler.streamConfigurationMap will
-              additionally return a min frame duration that is greater than
-              zero for each supported size-format combination.
-              </notes>
-            </value>
-            <value optional="true">MANUAL_POST_PROCESSING
-              <notes>
-              The camera device post-processing stages can be manually controlled.
-              The camera device supports basic manual control of the image post-processing
-              stages. This means the following controls are guaranteed to be supported:
-
-              * Manual tonemap control
-                  * android.tonemap.curve
-                  * android.tonemap.mode
-                  * android.tonemap.maxCurvePoints
-                  * android.tonemap.gamma
-                  * android.tonemap.presetCurve
-
-              * Manual white balance control
-                  * android.colorCorrection.transform
-                  * android.colorCorrection.gains
-              * Manual lens shading map control
-                    * android.shading.mode
-                    * android.statistics.lensShadingMapMode
-                    * android.statistics.lensShadingMap
-                    * android.lens.info.shadingMapSize
-              * Manual aberration correction control (if aberration correction is supported)
-                    * android.colorCorrection.aberrationMode
-                    * android.colorCorrection.availableAberrationModes
-              * Auto white balance lock
-                    * android.control.awbLock
-
-              If auto white balance is enabled, then the camera device
-              will accurately report the values applied by AWB in the result.
-
-              A given camera device may also support additional post-processing
-              controls, but this capability only covers the above list of controls.
-              </notes>
-            </value>
-            <value optional="true">RAW
-              <notes>
-              The camera device supports outputting RAW buffers and
-              metadata for interpreting them.
-
-              Devices supporting the RAW capability allow both for
-              saving DNG files, and for direct application processing of
-              raw sensor images.
-
-              * RAW_SENSOR is supported as an output format.
-              * The maximum available resolution for RAW_SENSOR streams
-                will match either the value in
-                android.sensor.info.pixelArraySize or
-                android.sensor.info.preCorrectionActiveArraySize.
-              * All DNG-related optional metadata entries are provided
-                by the camera device.
-              </notes>
-            </value>
-            <value optional="true" ndk_hidden="true">PRIVATE_REPROCESSING
-              <notes>
-              The camera device supports the Zero Shutter Lag reprocessing use case.
-
-              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
-              * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format,
-                that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of
-                formats returned by {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
-              * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
-                returns non empty int[] for each supported input format returned by {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
-              * Each size returned by {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
-                getInputSizes(ImageFormat.PRIVATE)} is also included in {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
-                getOutputSizes(ImageFormat.PRIVATE)}
-              * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop
-                relative to the sensor's maximum capture rate (at that resolution).
-              * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both
-                {@link android.graphics.ImageFormat#YUV_420_888} and
-                {@link android.graphics.ImageFormat#JPEG} formats.
-              * The maximum available resolution for PRIVATE streams
-                (both input/output) will match the maximum available
-                resolution of JPEG streams.
-              * Static metadata android.reprocess.maxCaptureStall.
-              * Only below controls are effective for reprocessing requests and
-                will be present in capture results, other controls in reprocess
-                requests will be ignored by the camera device.
-                    * android.jpeg.*
-                    * android.noiseReduction.mode
-                    * android.edge.mode
-              * android.noiseReduction.availableNoiseReductionModes and
-                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
-              </notes>
-            </value>
-            <value optional="true">READ_SENSOR_SETTINGS
-              <notes>
-              The camera device supports accurately reporting the sensor settings for many of
-              the sensor controls while the built-in 3A algorithm is running.  This allows
-              reporting of sensor settings even when these settings cannot be manually changed.
-
-              The values reported for the following controls are guaranteed to be available
-              in the CaptureResult, including when 3A is enabled:
-
-              * Exposure control
-                  * android.sensor.exposureTime
-              * Sensitivity control
-                  * android.sensor.sensitivity
-              * Lens controls (if the lens is adjustable)
-                  * android.lens.focusDistance
-                  * android.lens.aperture
-
-              This capability is a subset of the MANUAL_SENSOR control capability, and will
-              always be included if the MANUAL_SENSOR capability is available.
-              </notes>
-            </value>
-            <value optional="true">BURST_CAPTURE
-              <notes>
-              The camera device supports capturing high-resolution images at &gt;= 20 frames per
-              second, in at least the uncompressed YUV format, when post-processing settings are set
-              to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
-              per second.  Here, 'high resolution' means at least 8 megapixels, or the maximum
-              resolution of the device, whichever is smaller.
-
-              More specifically, this means that at least one output {@link
-              AIMAGE_FORMAT_YUV_420_888} size listed in
-              {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} is larger or equal to the
-              'high resolution' defined above, and can be captured at at least 20 fps.
-              For the largest {@link AIMAGE_FORMAT_YUV_420_888} size listed in
-              {@link ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS}, camera device can capture this
-              size for at least 10 frames per second.
-              Also the android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range
-              where the minimum FPS is &gt;= 1 / minimumFrameDuration for the largest YUV_420_888 size.
-
-              If the device supports the {@link AIMAGE_FORMAT_RAW10}, {@link
-              AIMAGE_FORMAT_RAW12}, then those can also be captured at the same rate
-              as the maximum-size YUV_420_888 resolution is.
-
-              In addition, the android.sync.maxLatency field is guaranteed to have a value between 0
-              and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable
-              are also guaranteed to be `true` so burst capture with these two locks ON yields
-              consistent image output.
-              </notes>
-            </value>
-            <value optional="true" ndk_hidden="true">YUV_REPROCESSING
-              <notes>
-              The camera device supports the YUV_420_888 reprocessing use case, similar to
-              PRIVATE_REPROCESSING. This capability requires the camera device to support the
-              following:
-
-              * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`.
-              * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input format, that is,
-                YUV_420_888 is included in the lists of formats returned by
-                {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and
-                {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}.
-              * {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput}
-                returns non-empty int[] for each supported input format returned by {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}.
-              * Each size returned by {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getInputSizes
-                getInputSizes(YUV_420_888)} is also included in {@link
-                android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes
-                getOutputSizes(YUV_420_888)}
-              * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate drop
-                relative to the sensor's maximum capture rate (at that resolution).
-              * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both
-                {@link android.graphics.ImageFormat#YUV_420_888} and {@link
-                android.graphics.ImageFormat#JPEG} formats.
-              * The maximum available resolution for {@link
-                android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the
-                maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams.
-              * Static metadata android.reprocess.maxCaptureStall.
-              * Only the below controls are effective for reprocessing requests and will be present
-                in capture results. The reprocess requests are from the original capture results that
-                are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888}
-                output buffers.  All other controls in the reprocess requests will be ignored by the
-                camera device.
-                    * android.jpeg.*
-                    * android.noiseReduction.mode
-                    * android.edge.mode
-                    * android.reprocess.effectiveExposureFactor
-              * android.noiseReduction.availableNoiseReductionModes and
-                android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
-              </notes>
-            </value>
-            <value optional="true">DEPTH_OUTPUT
-              <notes>
-              The camera device can produce depth measurements from its field of view.
-
-              This capability requires the camera device to support the following:
-
-              * {@link AIMAGE_FORMAT_DEPTH16} is supported as an output format.
-              * {@link AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is optionally supported as an
-                output format.
-              * This camera device, and all camera devices with the same android.lens.facing,
-                will list the following calibration entries in {@link ACameraMetadata} from both
-                {@link ACameraManager_getCameraCharacteristics} and
-                {@link ACameraCaptureSession_captureCallback_result}:
-                  - android.lens.poseTranslation
-                  - android.lens.poseRotation
-                  - android.lens.intrinsicCalibration
-                  - android.lens.radialDistortion
-              * The android.depth.depthIsExclusive entry is listed by this device.
-              * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
-                normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
-                format.
-
-              Generally, depth output operates at a slower frame rate than standard color capture,
-              so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
-              should be accounted for (see
-              {@link ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}).
-              On a device that supports both depth and color-based output, to enable smooth preview,
-              using a repeating burst is recommended, where a depth-output target is only included
-              once every N frames, where N is the ratio between preview output rate and depth output
-              rate, including depth stall time.
-              </notes>
-            </value>
-            <value optional="true" ndk_hidden="true">CONSTRAINED_HIGH_SPEED_VIDEO
-              <notes>
-              The device supports constrained high speed video recording (frame rate >=120fps)
-              use case. The camera device will support high speed capture session created by
-              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which
-              only accepts high speed request lists created by
-              {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
-
-              A camera device can still support high speed video streaming by advertising the high speed
-              FPS ranges in android.control.aeAvailableTargetFpsRanges. For this case, all normal
-              capture request per frame control and synchronization requirements will apply to
-              the high speed fps ranges, the same as all other fps ranges. This capability describes
-              the capability of a specialized operating mode with many limitations (see below), which
-              is only targeted at high speed video recording.
-
-              The supported high speed video sizes and fps ranges are specified in
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
-              To get desired output frame rates, the application is only allowed to select video size
-              and FPS range combinations provided by
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
-              The fps range can be controlled via android.control.aeTargetFpsRange.
-
-              In this capability, the camera device will override aeMode, awbMode, and afMode to
-              ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
-              controls will be overridden to be FAST. Therefore, no manual control of capture
-              and post-processing parameters is possible. All other controls operate the
-              same as when android.control.mode == AUTO. This means that all other
-              android.control.* fields continue to work, such as
-
-              * android.control.aeTargetFpsRange
-              * android.control.aeExposureCompensation
-              * android.control.aeLock
-              * android.control.awbLock
-              * android.control.effectMode
-              * android.control.aeRegions
-              * android.control.afRegions
-              * android.control.awbRegions
-              * android.control.afTrigger
-              * android.control.aePrecaptureTrigger
-
-              Outside of android.control.*, the following controls will work:
-
-              * android.flash.mode (TORCH mode only, automatic flash for still capture will not
-              work since aeMode is ON)
-              * android.lens.opticalStabilizationMode (if it is supported)
-              * android.scaler.cropRegion
-              * android.statistics.faceDetectMode (if it is supported)
-
-              For high speed recording use case, the actual maximum supported frame rate may
-              be lower than what camera can output, depending on the destination Surfaces for
-              the image data. For example, if the destination surface is from video encoder,
-              the application needs to check if the video encoder is capable of supporting the
-              high frame rate for a given video size, or it will end up with lower recording
-              frame rate. If the destination surface is from preview window, the actual preview frame
-              rate will be bounded by the screen refresh rate.
-
-              The camera device will only support up to 2 high speed simultaneous output surfaces
-              (preview and recording surfaces)
-              in this mode. Above controls will be effective only if all of below conditions are true:
-
-              * The application creates a camera capture session with no more than 2 surfaces via
-              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The
-              targeted surfaces must be preview surface (either from
-              {@link android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or
-              recording surface(either from {@link android.media.MediaRecorder#getSurface} or
-              {@link android.media.MediaCodec#createInputSurface}).
-              * The stream sizes are selected from the sizes reported by
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}.
-              * The FPS ranges are selected from
-              {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}.
-
-              When the above conditions are NOT satisfied,
-              {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}
-              will fail.
-
-              Switching to a FPS range that has different maximum FPS may trigger some camera device
-              reconfigurations, which may introduce extra latency. It is recommended that
-              the application avoids unnecessary maximum target FPS changes as much as possible
-              during high speed streaming.
-              </notes>
-            </value>
-          </enum>
-          <description>List of capabilities that this camera device
-          advertises as fully supporting.</description>
-          <details>
-          A capability is a contract that the camera device makes in order
-          to be able to satisfy one or more use cases.
-
-          Listing a capability guarantees that the whole set of features
-          required to support a common use will all be available.
-
-          Using a subset of the functionality provided by an unsupported
-          capability may be possible on a specific camera device implementation;
-          to do this query each of android.request.availableRequestKeys,
-          android.request.availableResultKeys,
-          android.request.availableCharacteristicsKeys.
-
-          The following capabilities are guaranteed to be available on
-          android.info.supportedHardwareLevel `==` FULL devices:
-
-          * MANUAL_SENSOR
-          * MANUAL_POST_PROCESSING
-
-          Other capabilities may be available on either FULL or LIMITED
-          devices, but the application should query this key to be sure.
-          </details>
-          <hal_details>
-          Additional constraint details per-capability will be available
-          in the Compatibility Test Suite.
-
-          Minimum baseline requirements required for the
-          BACKWARD_COMPATIBLE capability are not explicitly listed.
-          Instead refer to "BC" tags and the camera CTS tests in the
-          android.hardware.camera2.cts package.
-
-          Listed controls that can be either request or result (e.g.
-          android.sensor.exposureTime) must be available both in the
-          request and the result in order to be considered to be
-          capability-compliant.
-
-          For example, if the HAL claims to support MANUAL control,
-          then exposure time must be configurable via the request _and_
-          the actual exposure applied must be available via
-          the result.
-
-          If MANUAL_SENSOR is omitted, the HAL may choose to omit the
-          android.scaler.availableMinFrameDurations static property entirely.
-
-          For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see
-          hardware/libhardware/include/hardware/camera3.h Section 10 for more information.
-
-          Devices that support the MANUAL_SENSOR capability must support the
-          CAMERA3_TEMPLATE_MANUAL template defined in camera3.h.
-
-          Devices that support the PRIVATE_REPROCESSING capability or the
-          YUV_REPROCESSING capability must support the
-          CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h.
-
-          For DEPTH_OUTPUT, the depth-format keys
-          android.depth.availableDepthStreamConfigurations,
-          android.depth.availableDepthMinFrameDurations,
-          android.depth.availableDepthStallDurations must be available, in
-          addition to the other keys explicitly mentioned in the DEPTH_OUTPUT
-          enum notes. The entry android.depth.maxDepthSamples must be available
-          if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace
-          DEPTH).
-          </hal_details>
-        </entry>
-        <entry name="availableRequestKeys" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>A list of all keys that the camera device has available
-          to use with {@link ACaptureRequest}.</description>
-
-          <details>Attempting to set a key into a CaptureRequest that is not
-          listed here will result in an invalid request and will be rejected
-          by the camera device.
-
-          This field can be used to query the feature set of a camera device
-          at a more granular level than capabilities. This is especially
-          important for optional keys that are not listed under any capability
-          in android.request.availableCapabilities.
-          </details>
-          <hal_details>
-          Vendor tags can be listed here. Vendor tag metadata should also use
-          the extensions C api (refer to camera3.h for more details).
-
-          Setting/getting vendor tags will be checked against the metadata
-          vendor extensions API and not against this field.
-
-          The HAL must not consume any request tags that are not listed either
-          here or in the vendor tag list.
-
-          The public camera2 API will always make the vendor tags visible
-          via
-          {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}.
-          </hal_details>
-        </entry>
-        <entry name="availableResultKeys" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>A list of all keys that the camera device has available
-          to query with {@link ACameraMetadata} from
-          {@link ACameraCaptureSession_captureCallback_result}.</description>
-
-          <details>Attempting to get a key from a CaptureResult that is not
-          listed here will always return a `null` value. Getting a key from
-          a CaptureResult that is listed here will generally never return a `null`
-          value.
-
-          The following keys may return `null` unless they are enabled:
-
-          * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON)
-
-          (Those sometimes-null keys will nevertheless be listed here
-          if they are available.)
-
-          This field can be used to query the feature set of a camera device
-          at a more granular level than capabilities. This is especially
-          important for optional keys that are not listed under any capability
-          in android.request.availableCapabilities.
-          </details>
-          <hal_details>
-          Tags listed here must always have an entry in the result metadata,
-          even if that size is 0 elements. Only array-type tags (e.g. lists,
-          matrices, strings) are allowed to have 0 elements.
-
-          Vendor tags can be listed here. Vendor tag metadata should also use
-          the extensions C api (refer to camera3.h for more details).
-
-          Setting/getting vendor tags will be checked against the metadata
-          vendor extensions API and not against this field.
-
-          The HAL must not produce any result tags that are not listed either
-          here or in the vendor tag list.
-
-          The public camera2 API will always make the vendor tags visible via {@link
-          android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}.
-          </hal_details>
-        </entry>
-        <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>A list of all keys that the camera device has available
-          to query with {@link ACameraMetadata} from
-          {@link ACameraManager_getCameraCharacteristics}.</description>
-          <details>This entry follows the same rules as
-          android.request.availableResultKeys (except that it applies for
-          CameraCharacteristics instead of CaptureResult). See above for more
-          details.
-          </details>
-          <hal_details>
-          Keys listed here must always have an entry in the static info metadata,
-          even if that size is 0 elements. Only array-type tags (e.g. lists,
-          matrices, strings) are allowed to have 0 elements.
-
-          Vendor tags can be listed here. Vendor tag metadata should also use
-          the extensions C api (refer to camera3.h for more details).
-
-          Setting/getting vendor tags will be checked against the metadata
-          vendor extensions API and not against this field.
-
-          The HAL must not have any tags in its static info that are not listed
-          either here or in the vendor tag list.
-
-          The public camera2 API will always make the vendor tags visible
-          via {@link android.hardware.camera2.CameraCharacteristics#getKeys}.
-          </hal_details>
-        </entry>
-      </static>
-    </section>
-    <section name="scaler">
-      <controls>
-        <entry name="cropRegion" type="int32" visibility="public"
-               container="array" typedef="rectangle" hwlevel="legacy">
-          <array>
-            <size>4</size>
-          </array>
-          <description>The desired region of the sensor to read out for this capture.</description>
-          <units>Pixel coordinates relative to
-          android.sensor.info.activeArraySize</units>
-          <details>
-            This control can be used to implement digital zoom.
-
-            The data representation is int[4], which maps to (left, top, width, height).
-
-            The crop region coordinate system is based off
-            android.sensor.info.activeArraySize, with `(0, 0)` being the
-            top-left corner of the sensor active array.
-
-            Output streams use this rectangle to produce their output,
-            cropping to a smaller region if necessary to maintain the
-            stream's aspect ratio, then scaling the sensor input to
-            match the output's configured resolution.
-
-            The crop region is applied after the RAW to other color
-            space (e.g. YUV) conversion. Since raw streams
-            (e.g. RAW16) don't have the conversion stage, they are not
-            croppable. The crop region will be ignored by raw streams.
-
-            For non-raw streams, any additional per-stream cropping will
-            be done to maximize the final pixel area of the stream.
-
-            For example, if the crop region is set to a 4:3 aspect
-            ratio, then 4:3 streams will use the exact crop
-            region. 16:9 streams will further crop vertically
-            (letterbox).
-
-            Conversely, if the crop region is set to a 16:9, then 4:3
-            outputs will crop horizontally (pillarbox), and 16:9
-            streams will match exactly. These additional crops will
-            be centered within the crop region.
-
-            The width and height of the crop region cannot
-            be set to be smaller than
-            `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and
-            `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively.
-
-            The camera device may adjust the crop region to account
-            for rounding and other hardware requirements; the final
-            crop region used will be included in the output capture
-            result.
-          </details>
-          <hal_details>
-            The output streams must maintain square pixels at all
-            times, no matter what the relative aspect ratios of the
-            crop region and the stream are.  Negative values for
-            corner are allowed for raw output if full pixel array is
-            larger than active pixel array. Width and height may be
-            rounded to nearest larger supportable width, especially
-            for raw output, where only a few fixed scales may be
-            possible.
-
-            For a set of output streams configured, if the sensor output is cropped to a smaller
-            size than the active array size, the HAL needs to follow the below cropping rules:
-
-            * The HAL needs to handle the cropRegion as if the sensor crop size is the effective active
-            array size. More specifically, the HAL must transform the request cropRegion from
-            android.sensor.info.activeArraySize to the sensor cropped pixel area size in this way:
-                1. Translate the requested cropRegion w.r.t., the left top corner of the sensor
-                cropped pixel area by (tx, ty),
-                where `ty = sensorCrop.top * (sensorCrop.height / activeArraySize.height)`
-                and `tx = sensorCrop.left * (sensorCrop.width / activeArraySize.width)`. The
-                (sensorCrop.top, sensorCrop.left) is the coordinate based off the
-                android.sensor.info.activeArraySize.
-                2. Scale the width and height of requested cropRegion with scaling factor of
-                sensorCrop.width/activeArraySize.width and sensorCrop.height/activeArraySize.height
-                respectively.
-            Once this new cropRegion is calculated, the HAL must use this region to crop the image
-            with regard to the sensor crop size (effective active array size). The HAL still needs to
-            follow the general cropping rule for this new cropRegion and effective active
-            array size.
-
-            * The HAL must report the cropRegion with regard to android.sensor.info.activeArraySize.
-            The HAL needs to convert the new cropRegion generated above w.r.t. the full active array size.
-            The reported cropRegion may be slightly different from the requested cropRegion since
-            the HAL may adjust the crop region to account for rounding, conversion error, or other
-            hardware limitations.
-
-            HAL2.x uses only (x, y, width)
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-      </controls>
-      <static>
-        <entry name="availableFormats" type="int32"
-        visibility="hidden" deprecated="true" enum="true"
-        container="array" typedef="imageFormat">
-          <array>
-            <size>n</size>
-          </array>
-          <enum>
-            <value optional="true" id="0x20">RAW16
-              <notes>
-              RAW16 is a standard, cross-platform format for raw image
-              buffers with 16-bit pixels.
-
-              Buffers of this format are typically expected to have a
-              Bayer Color Filter Array (CFA) layout, which is given in
-              android.sensor.info.colorFilterArrangement. Sensors with
-              CFAs that are not representable by a format in
-              android.sensor.info.colorFilterArrangement should not
-              use this format.
-
-              Buffers of this format will also follow the constraints given for
-              RAW_OPAQUE buffers, but with relaxed performance constraints.
-
-              This format is intended to give users access to the full contents
-              of the buffers coming directly from the image sensor prior to any
-              cropping or scaling operations, and all coordinate systems for
-              metadata used for this format are relative to the size of the
-              active region of the image sensor before any geometric distortion
-              correction has been applied (i.e.
-              android.sensor.info.preCorrectionActiveArraySize). Supported
-              dimensions for this format are limited to the full dimensions of
-              the sensor (e.g. either android.sensor.info.pixelArraySize or
-              android.sensor.info.preCorrectionActiveArraySize will be the
-              only supported output size).
-
-              See android.scaler.availableInputOutputFormatsMap for
-              the full set of performance guarantees.
-              </notes>
-            </value>
-            <value optional="true" id="0x24">RAW_OPAQUE
-              <notes>
-              RAW_OPAQUE (or
-              {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}
-              as referred in public API) is a format for raw image buffers
-              coming from an image sensor.
-
-              The actual structure of buffers of this format is
-              platform-specific, but must follow several constraints:
-
-              1. No image post-processing operations may have been applied to
-              buffers of this type. These buffers contain raw image data coming
-              directly from the image sensor.
-              1. If a buffer of this format is passed to the camera device for
-              reprocessing, the resulting images will be identical to the images
-              produced if the buffer had come directly from the sensor and was
-              processed with the same settings.
-
-              The intended use for this format is to allow access to the native
-              raw format buffers coming directly from the camera sensor without
-              any additional conversions or decrease in framerate.
-
-              See android.scaler.availableInputOutputFormatsMap for the full set of
-              performance guarantees.
-              </notes>
-            </value>
-            <value optional="true" id="0x32315659">YV12
-              <notes>YCrCb 4:2:0 Planar</notes>
-            </value>
-            <value optional="true" id="0x11">YCrCb_420_SP
-              <notes>NV21</notes>
-            </value>
-            <value id="0x22">IMPLEMENTATION_DEFINED
-              <notes>System internal format, not application-accessible</notes>
-            </value>
-            <value id="0x23">YCbCr_420_888
-              <notes>Flexible YUV420 Format</notes>
-            </value>
-            <value id="0x21">BLOB
-              <notes>JPEG format</notes>
-            </value>
-          </enum>
-          <description>The list of image formats that are supported by this
-          camera device for output streams.</description>
-          <details>
-          All camera devices will support JPEG and YUV_420_888 formats.
-
-          When set to YUV_420_888, application can access the YUV420 data directly.
-          </details>
-          <hal_details>
-          These format values are from HAL_PIXEL_FORMAT_* in
-          system/core/include/system/graphics.h.
-
-          When IMPLEMENTATION_DEFINED is used, the platform
-          gralloc module will select a format based on the usage flags provided
-          by the camera HAL device and the other endpoint of the stream. It is
-          usually used by preview and recording streams, where the application doesn't
-          need access the image data.
-
-          YCbCr_420_888 format must be supported by the HAL. When an image stream
-          needs CPU/application direct access, this format will be used.
-
-          The BLOB format must be supported by the HAL. This is used for the JPEG stream.
-
-          A RAW_OPAQUE buffer should contain only pixel data. It is strongly
-          recommended that any information used by the camera device when
-          processing images is fully expressed by the result metadata
-          for that image buffer.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true"
-        container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>The minimum frame duration that is supported
-          for each resolution in android.scaler.availableJpegSizes.
-          </description>
-          <units>Nanoseconds</units>
-          <range>TODO: Remove property.</range>
-          <details>
-          This corresponds to the minimum steady-state frame duration when only
-          that JPEG stream is active and captured in a burst, with all
-          processing (typically in android.*.mode) set to FAST.
-
-          When multiple streams are configured, the minimum
-          frame duration will be &amp;gt;= max(individual stream min
-          durations)</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableJpegSizes" type="int32" visibility="hidden"
-        deprecated="true" container="array" typedef="size">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>The JPEG resolutions that are supported by this camera device.</description>
-          <range>TODO: Remove property.</range>
-          <details>
-          The resolutions are listed as `(width, height)` pairs. All camera devices will support
-          sensor maximum resolution (defined by android.sensor.info.activeArraySize).
-          </details>
-          <hal_details>
-          The HAL must include sensor maximum resolution
-          (defined by android.sensor.info.activeArraySize),
-          and should include half/quarter of sensor maximum resolution.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableMaxDigitalZoom" type="float" visibility="public"
-              hwlevel="legacy">
-          <description>The maximum ratio between both active area width
-          and crop region width, and active area height and
-          crop region height, for android.scaler.cropRegion.
-          </description>
-          <units>Zoom scale factor</units>
-          <range>&amp;gt;=1</range>
-          <details>
-          This represents the maximum amount of zooming possible by
-          the camera device, or equivalently, the minimum cropping
-          window size.
-
-          Crop regions that have a width or height that is smaller
-          than this ratio allows will be rounded up to the minimum
-          allowed size by the camera device.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true"
-        container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>For each available processed output size (defined in
-          android.scaler.availableProcessedSizes), this property lists the
-          minimum supportable frame duration for that size.
-          </description>
-          <units>Nanoseconds</units>
-          <details>
-          This should correspond to the frame duration when only that processed
-          stream is active, with all processing (typically in android.*.mode)
-          set to FAST.
-
-          When multiple streams are configured, the minimum frame duration will
-          be &amp;gt;= max(individual stream min durations).
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableProcessedSizes" type="int32" visibility="hidden"
-        deprecated="true" container="array" typedef="size">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>The resolutions available for use with
-          processed output streams, such as YV12, NV12, and
-          platform opaque YUV/RGB streams to the GPU or video
-          encoders.</description>
-          <details>
-          The resolutions are listed as `(width, height)` pairs.
-
-          For a given use case, the actual maximum supported resolution
-          may be lower than what is listed here, depending on the destination
-          Surface for the image data. For example, for recording video,
-          the video encoder chosen may have a maximum size limit (e.g. 1080p)
-          smaller than what the camera (e.g. maximum resolution is 3264x2448)
-          can provide.
-
-          Please reference the documentation for the image data destination to
-          check if it limits the maximum size for image data.
-          </details>
-          <hal_details>
-          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
-          the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes
-          and each below resolution if it is smaller than or equal to the sensor
-          maximum resolution (if they are not listed in JPEG sizes already):
-
-          * 240p (320 x 240)
-          * 480p (640 x 480)
-          * 720p (1280 x 720)
-          * 1080p (1920 x 1080)
-
-          For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`),
-          the HAL only has to list up to the maximum video size supported by the devices.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableRawMinDurations" type="int64" deprecated="true"
-        container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          For each available raw output size (defined in
-          android.scaler.availableRawSizes), this property lists the minimum
-          supportable frame duration for that size.
-          </description>
-          <units>Nanoseconds</units>
-          <details>
-          Should correspond to the frame duration when only the raw stream is
-          active.
-
-          When multiple streams are configured, the minimum
-          frame duration will be &amp;gt;= max(individual stream min
-          durations)</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="availableRawSizes" type="int32" deprecated="true"
-        container="array" typedef="size">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>The resolutions available for use with raw
-          sensor output streams, listed as width,
-          height</description>
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.scaler.cropRegion" kind="controls">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden"
-          typedef="reprocessFormatsMap">
-          <description>The mapping of image formats that are supported by this
-          camera device for input streams, to their corresponding output formats.
-          </description>
-          <details>
-          All camera devices with at least 1
-          android.request.maxNumInputStreams will have at least one
-          available input format.
-
-          The camera device will support the following map of formats,
-          if its dependent capability (android.request.availableCapabilities) is supported:
-
-            Input Format                                    | Output Format                                     | Capability
-          :-------------------------------------------------|:--------------------------------------------------|:----------
-          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#JPEG}         | PRIVATE_REPROCESSING
-          {@link android.graphics.ImageFormat#PRIVATE}      | {@link android.graphics.ImageFormat#YUV_420_888}  | PRIVATE_REPROCESSING
-          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#JPEG}         | YUV_REPROCESSING
-          {@link android.graphics.ImageFormat#YUV_420_888}  | {@link android.graphics.ImageFormat#YUV_420_888}  | YUV_REPROCESSING
-
-          PRIVATE refers to a device-internal format that is not directly application-visible.  A
-          PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance}
-          with {@link android.graphics.ImageFormat#PRIVATE} as the format.
-
-          For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
-          or output will never hurt maximum frame rate (i.e.  {@link
-          android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration
-          getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0),
-
-          Attempting to configure an input stream with output streams not
-          listed as available in this map is not valid.
-          </details>
-          <hal_details>
-          For the formats, see `system/core/include/system/graphics.h` for a definition
-          of the image format enumerations. The PRIVATE format refers to the
-          HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine
-          the actual format by using the gralloc usage flags.
-          For ZSL use case in particular, the HAL could choose appropriate format (partially
-          processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL.
-          See camera3.h for more details.
-
-          This value is encoded as a variable-size array-of-arrays.
-          The inner array always contains `[format, length, ...]` where
-          `...` has `length` elements. An inner array is followed by another
-          inner array if the total metadata entry size hasn't yet been exceeded.
-
-          A code sample to read/write this encoding (with a device that
-          supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888, and JPEG,
-          and reprocessing YUV_420_888 to YUV_420_888 and JPEG):
-
-              // reading
-              int32_t* contents = &amp;entry.i32[0];
-              for (size_t i = 0; i &lt; entry.count; ) {
-                  int32_t format = contents[i++];
-                  int32_t length = contents[i++];
-                  int32_t output_formats[length];
-                  memcpy(&amp;output_formats[0], &amp;contents[i],
-                         length * sizeof(int32_t));
-                  i += length;
-              }
-
-              // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING)
-              int32_t[] contents = {
-                IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB,
-                YUV_420_888, 2, YUV_420_888, BLOB,
-              };
-              update_camera_metadata_entry(metadata, index, &amp;contents[0],
-                    sizeof(contents)/sizeof(contents[0]), &amp;updated_entry);
-
-          If the HAL claims to support any of the capabilities listed in the
-          above details, then it must also support all the input-output
-          combinations listed for that capability. It can optionally support
-          additional formats if it so chooses.
-          </hal_details>
-          <tag id="REPROC" />
-        </entry>
-        <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public"
-               enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy">
-          <array>
-            <size>n</size>
-            <size>4</size>
-          </array>
-          <enum>
-            <value>OUTPUT</value>
-            <value>INPUT</value>
-          </enum>
-          <description>The available stream configurations that this
-          camera device supports
-          (i.e. format, width, height, output/input stream).
-          </description>
-          <details>
-          The configurations are listed as `(format, width, height, input?)`
-          tuples.
-
-          For a given use case, the actual maximum supported resolution
-          may be lower than what is listed here, depending on the destination
-          Surface for the image data. For example, for recording video,
-          the video encoder chosen may have a maximum size limit (e.g. 1080p)
-          smaller than what the camera (e.g. maximum resolution is 3264x2448)
-          can provide.
-
-          Please reference the documentation for the image data destination to
-          check if it limits the maximum size for image data.
-
-          Not all output formats may be supported in a configuration with
-          an input stream of a particular format. For more details, see
-          android.scaler.availableInputOutputFormatsMap.
-
-          The following table describes the minimum required output stream
-          configurations based on the hardware level
-          (android.info.supportedHardwareLevel):
-
-          Format         | Size                                         | Hardware Level | Notes
-          :-------------:|:--------------------------------------------:|:--------------:|:--------------:
-          JPEG           | android.sensor.info.activeArraySize          | Any            |
-          JPEG           | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
-          JPEG           | 1280x720 (720)                               | Any            | if 720p &lt;= activeArraySize
-          JPEG           | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
-          JPEG           | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
-          YUV_420_888    | all output sizes available for JPEG          | FULL           |
-          YUV_420_888    | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
-          IMPLEMENTATION_DEFINED | same as YUV_420_888                  | Any            |
-
-          Refer to android.request.availableCapabilities for additional
-          mandatory stream configurations on a per-capability basis.
-          </details>
-          <hal_details>
-          It is recommended (but not mandatory) to also include half/quarter
-          of sensor maximum resolution for JPEG formats (regardless of hardware
-          level).
-
-          (The following is a rewording of the above required table):
-
-          For JPEG format, the sizes may be restricted by below conditions:
-
-          * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
-          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
-          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
-          it does not have to be included in the supported JPEG sizes.
-          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
-          the dimensions being a multiple of 16.
-
-          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
-          However, the largest JPEG size must be as close as possible to the sensor maximum
-          resolution given above constraints. It is required that after aspect ratio adjustments,
-          additional size reduction due to other issues must be less than 3% in area. For example,
-          if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
-          ratio 4:3, the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
-          3264x2448.
-
-          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
-          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
-          here as output streams.
-
-          It must also include each below resolution if it is smaller than or
-          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
-          formats), as output streams:
-
-          * 240p (320 x 240)
-          * 480p (640 x 480)
-          * 720p (1280 x 720)
-          * 1080p (1920 x 1080)
-
-          For LIMITED capability devices
-          (`android.info.supportedHardwareLevel == LIMITED`),
-          the HAL only has to list up to the maximum video size
-          supported by the device.
-
-          Regardless of hardware level, every output resolution available for
-          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
-
-          This supercedes the following fields, which are now deprecated:
-
-          * availableFormats
-          * available[Processed,Raw,Jpeg]Sizes
-          </hal_details>
-        </entry>
-        <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public"
-               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
-          <array>
-            <size>4</size>
-            <size>n</size>
-          </array>
-          <description>This lists the minimum frame duration for each
-          format/size combination.
-          </description>
-          <units>(format, width, height, ns) x n</units>
-          <details>
-          This should correspond to the frame duration when only that
-          stream is active, with all processing (typically in android.*.mode)
-          set to either OFF or FAST.
-
-          When multiple streams are used in a request, the minimum frame
-          duration will be max(individual stream min durations).
-
-          The minimum frame duration of a stream (of a particular format, size)
-          is the same regardless of whether the stream is input or output.
-
-          See android.sensor.frameDuration and
-          android.scaler.availableStallDurations for more details about
-          calculating the max frame rate.
-          </details>
-          <tag id="V1" />
-        </entry>
-        <entry name="availableStallDurations" type="int64" visibility="ndk_public"
-               container="array" typedef="streamConfigurationDuration" hwlevel="legacy">
-          <array>
-            <size>4</size>
-            <size>n</size>
-          </array>
-          <description>This lists the maximum stall duration for each
-          output format/size combination.
-          </description>
-          <units>(format, width, height, ns) x n</units>
-          <details>
-          A stall duration is how much extra time would get added
-          to the normal minimum frame duration for a repeating request
-          that has streams with non-zero stall.
-
-          For example, consider JPEG captures which have the following
-          characteristics:
-
-          * JPEG streams act like processed YUV streams in requests for which
-          they are not included; in requests in which they are directly
-          referenced, they act as JPEG streams. This is because supporting a
-          JPEG stream requires the underlying YUV data to always be ready for
-          use by a JPEG encoder, but the encoder will only be used (and impact
-          frame duration) on requests that actually reference a JPEG stream.
-          * The JPEG processor can run concurrently to the rest of the camera
-          pipeline, but cannot process more than 1 capture at a time.
-
-          In other words, using a repeating YUV request would result
-          in a steady frame rate (let's say it's 30 FPS). If a single
-          JPEG request is submitted periodically, the frame rate will stay
-          at 30 FPS (as long as we wait for the previous JPEG to return each
-          time). If we try to submit a repeating YUV + JPEG request, then
-          the frame rate will drop from 30 FPS.
-
-          In general, submitting a new request with a non-0 stall time
-          stream will _not_ cause a frame rate drop unless there are still
-          outstanding buffers for that stream from previous requests.
-
-          Submitting a repeating request with streams (call this `S`)
-          is the same as setting the minimum frame duration from
-          the normal minimum frame duration corresponding to `S`, added with
-          the maximum stall duration for `S`.
-
-          If interleaving requests with and without a stall duration,
-          a request will stall by the maximum of the remaining times
-          for each can-stall stream with outstanding buffers.
-
-          This means that a stalling request will not have an exposure start
-          until the stall has completed.
-
-          This should correspond to the stall duration when only that stream is
-          active, with all processing (typically in android.*.mode) set to FAST
-          or OFF. Setting any of the processing modes to HIGH_QUALITY
-          effectively results in an indeterminate stall duration for all
-          streams in a request (the regular stall calculation rules are
-          ignored).
-
-          The following formats may always have a stall duration:
-
-          * {@link AIMAGE_FORMAT_JPEG}
-          * {@link AIMAGE_FORMAT_RAW16}
-
-          The following formats will never have a stall duration:
-
-          * {@link AIMAGE_FORMAT_YUV_420_888}
-          * {@link AIMAGE_FORMAT_RAW10}
-
-          All other formats may or may not have an allowed stall duration on
-          a per-capability basis; refer to android.request.availableCapabilities
-          for more details.
-
-          See android.sensor.frameDuration for more information about
-          calculating the max frame rate (absent stalls).
-          </details>
-          <hal_details>
-          If possible, it is recommended that all non-JPEG formats
-          (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE
-          and IMPLEMENTATION_DEFINED must not have stall durations.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-        <entry name="streamConfigurationMap" type="int32" visibility="java_public"
-               synthetic="true" typedef="streamConfigurationMap"
-               hwlevel="legacy">
-          <description>The available stream configurations that this
-          camera device supports; also includes the minimum frame durations
-          and the stall durations for each format/size combination.
-          </description>
-          <details>
-          All camera devices will support sensor maximum resolution (defined by
-          android.sensor.info.activeArraySize) for the JPEG format.
-
-          For a given use case, the actual maximum supported resolution
-          may be lower than what is listed here, depending on the destination
-          Surface for the image data. For example, for recording video,
-          the video encoder chosen may have a maximum size limit (e.g. 1080p)
-          smaller than what the camera (e.g. maximum resolution is 3264x2448)
-          can provide.
-
-          Please reference the documentation for the image data destination to
-          check if it limits the maximum size for image data.
-
-          The following table describes the minimum required output stream
-          configurations based on the hardware level
-          (android.info.supportedHardwareLevel):
-
-          Format                                             | Size                                         | Hardware Level | Notes
-          :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------:
-          {@link android.graphics.ImageFormat#JPEG}          | android.sensor.info.activeArraySize (*1)     | Any            |
-          {@link android.graphics.ImageFormat#JPEG}          | 1920x1080 (1080p)                            | Any            | if 1080p &lt;= activeArraySize
-          {@link android.graphics.ImageFormat#JPEG}          | 1280x720 (720p)                               | Any            | if 720p &lt;= activeArraySize
-          {@link android.graphics.ImageFormat#JPEG}          | 640x480 (480p)                               | Any            | if 480p &lt;= activeArraySize
-          {@link android.graphics.ImageFormat#JPEG}          | 320x240 (240p)                               | Any            | if 240p &lt;= activeArraySize
-          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG          | FULL           |
-          {@link android.graphics.ImageFormat#YUV_420_888}   | all output sizes available for JPEG, up to the maximum video size | LIMITED        |
-          {@link android.graphics.ImageFormat#PRIVATE}       | same as YUV_420_888                          | Any            |
-
-          Refer to android.request.availableCapabilities and {@link
-          android.hardware.camera2.CameraDevice#createCaptureSession} for additional mandatory
-          stream configurations on a per-capability basis.
-
-          *1: For JPEG format, the sizes may be restricted by below conditions:
-
-          * The HAL may choose the aspect ratio of each Jpeg size to be one of well known ones
-          (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution
-          (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
-          it does not have to be included in the supported JPEG sizes.
-          * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
-          the dimensions being a multiple of 16.
-          Therefore, the maximum JPEG size may be smaller than sensor maximum resolution.
-          However, the largest JPEG size will be as close as possible to the sensor maximum
-          resolution given above constraints. It is required that after aspect ratio adjustments,
-          additional size reduction due to other issues must be less than 3% in area. For example,
-          if the sensor maximum resolution is 3280x2464, if the maximum JPEG size has aspect
-          ratio 4:3, and the JPEG encoder alignment requirement is 16, the maximum JPEG size will be
-          3264x2448.
-          </details>
-          <hal_details>
-          Do not set this property directly
-          (it is synthetic and will not be available at the HAL layer);
-          set the android.scaler.availableStreamConfigurations instead.
-
-          Not all output formats may be supported in a configuration with
-          an input stream of a particular format. For more details, see
-          android.scaler.availableInputOutputFormatsMap.
-
-          It is recommended (but not mandatory) to also include half/quarter
-          of sensor maximum resolution for JPEG formats (regardless of hardware
-          level).
-
-          (The following is a rewording of the above required table):
-
-          The HAL must include sensor maximum resolution (defined by
-          android.sensor.info.activeArraySize).
-
-          For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
-          the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
-          here as output streams.
-
-          It must also include each below resolution if it is smaller than or
-          equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
-          formats), as output streams:
-
-          * 240p (320 x 240)
-          * 480p (640 x 480)
-          * 720p (1280 x 720)
-          * 1080p (1920 x 1080)
-
-          For LIMITED capability devices
-          (`android.info.supportedHardwareLevel == LIMITED`),
-          the HAL only has to list up to the maximum video size
-          supported by the device.
-
-          Regardless of hardware level, every output resolution available for
-          YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
-
-          This supersedes the following fields, which are now deprecated:
-
-          * availableFormats
-          * available[Processed,Raw,Jpeg]Sizes
-          </hal_details>
-        </entry>
-        <entry name="croppingType" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>CENTER_ONLY
-              <notes>
-                The camera device only supports centered crop regions.
-              </notes>
-            </value>
-            <value>FREEFORM
-              <notes>
-                The camera device supports arbitrarily chosen crop regions.
-              </notes>
-            </value>
-          </enum>
-          <description>The crop type that this camera device supports.</description>
-          <details>
-          When passing a non-centered crop region (android.scaler.cropRegion) to a camera
-          device that only supports CENTER_ONLY cropping, the camera device will move the
-          crop region to the center of the sensor active array (android.sensor.info.activeArraySize)
-          and keep the crop region width and height unchanged. The camera device will return the
-          final used crop region in metadata result android.scaler.cropRegion.
-
-          Camera devices that support FREEFORM cropping will support any crop region that
-          is inside of the active array. The camera device will apply the same crop region and
-          return the final used crop region in capture result metadata android.scaler.cropRegion.
-
-          LEGACY capability devices will only support CENTER_ONLY cropping.
-          </details>
-        </entry>
-      </static>
-    </section>
-    <section name="sensor">
-      <controls>
-        <entry name="exposureTime" type="int64" visibility="public" hwlevel="full">
-          <description>Duration each pixel is exposed to
-          light.</description>
-          <units>Nanoseconds</units>
-          <range>android.sensor.info.exposureTimeRange</range>
-          <details>If the sensor can't expose this exact duration, it will shorten the
-          duration exposed to the nearest possible value (rather than expose longer).
-          The final exposure time used will be available in the output capture result.
-
-          This control is only effective if android.control.aeMode or android.control.mode is set to
-          OFF; otherwise the auto-exposure algorithm will override this value.
-          </details>
-          <tag id="V1" />
-        </entry>
-        <entry name="frameDuration" type="int64" visibility="public" hwlevel="full">
-          <description>Duration from start of frame exposure to
-          start of next frame exposure.</description>
-          <units>Nanoseconds</units>
-          <range>See android.sensor.info.maxFrameDuration,
-          ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS. The duration
-          is capped to `max(duration, exposureTime + overhead)`.</range>
-          <details>
-          The maximum frame rate that can be supported by a camera subsystem is
-          a function of many factors:
-
-          * Requested resolutions of output image streams
-          * Availability of binning / skipping modes on the imager
-          * The bandwidth of the imager interface
-          * The bandwidth of the various ISP processing blocks
-
-          Since these factors can vary greatly between different ISPs and
-          sensors, the camera abstraction tries to represent the bandwidth
-          restrictions with as simple a model as possible.
-
-          The model presented has the following characteristics:
-
-          * The image sensor is always configured to output the smallest
-          resolution possible given the application's requested output stream
-          sizes.  The smallest resolution is defined as being at least as large
-          as the largest requested output stream size; the camera pipeline must
-          never digitally upsample sensor data when the crop region covers the
-          whole sensor. In general, this means that if only small output stream
-          resolutions are configured, the sensor can provide a higher frame
-          rate.
-          * Since any request may use any or all the currently configured
-          output streams, the sensor and ISP must be configured to support
-          scaling a single capture to all the streams at the same time.  This
-          means the camera pipeline must be ready to produce the largest
-          requested output size without any delay.  Therefore, the overall
-          frame rate of a given configured stream set is governed only by the
-          largest requested stream resolution.
-          * Using more than one output stream in a request does not affect the
-          frame duration.
-          * Certain format-streams may need to do additional background processing
-          before data is consumed/produced by that stream. These processors
-          can run concurrently to the rest of the camera pipeline, but
-          cannot process more than 1 capture at a time.
-
-          The necessary information for the application, given the model above,
-          is provided via
-          {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}.
-          These are used to determine the maximum frame rate / minimum frame
-          duration that is possible for a given stream configuration.
-
-          Specifically, the application can use the following rules to
-          determine the minimum frame duration it can request from the camera
-          device:
-
-          1. Let the set of currently configured input/output streams
-          be called `S`.
-          1. Find the minimum frame durations for each stream in `S`, by looking
-          it up in {@link ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
-          (with its respective size/format). Let this set of frame durations be
-          called `F`.
-          1. For any given request `R`, the minimum frame duration allowed
-          for `R` is the maximum out of all values in `F`. Let the streams
-          used in `R` be called `S_r`.
-
-          If none of the streams in `S_r` have a stall time (listed in {@link
-          ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}
-          using its respective size/format), then the frame duration in `F`
-          determines the steady state frame rate that the application will get
-          if it uses `R` as a repeating request. Let this special kind of
-          request be called `Rsimple`.
-
-          A repeating request `Rsimple` can be _occasionally_ interleaved
-          by a single capture of a new request `Rstall` (which has at least
-          one in-use stream with a non-0 stall time) and if `Rstall` has the
-          same minimum frame duration this will not cause a frame rate loss
-          if all buffers from the previous `Rstall` have already been
-          delivered.
-
-          For more details about stalling, see
-          {@link ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}.
-
-          This control is only effective if android.control.aeMode or android.control.mode is set to
-          OFF; otherwise the auto-exposure algorithm will override this value.
-          </details>
-          <hal_details>
-          For more details about stalling, see
-          android.scaler.availableStallDurations.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-        <entry name="sensitivity" type="int32" visibility="public" hwlevel="full">
-          <description>The amount of gain applied to sensor data
-          before processing.</description>
-          <units>ISO arithmetic units</units>
-          <range>android.sensor.info.sensitivityRange</range>
-          <details>
-          The sensitivity is the standard ISO sensitivity value,
-          as defined in ISO 12232:2006.
-
-          The sensitivity must be within android.sensor.info.sensitivityRange, and
-          if it is less than android.sensor.maxAnalogSensitivity, the camera device
-          is guaranteed to use only analog amplification for applying the gain.
-
-          If the camera device cannot apply the exact sensitivity
-          requested, it will reduce the gain to the nearest supported
-          value. The final sensitivity used will be available in the
-          output capture result.
-
-          This control is only effective if android.control.aeMode or android.control.mode is set to
-          OFF; otherwise the auto-exposure algorithm will override this value.
-          </details>
-          <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details>
-          <tag id="V1" />
-        </entry>
-      </controls>
-      <static>
-        <namespace name="info">
-          <entry name="activeArraySize" type="int32" visibility="public"
-          type_notes="Four ints defining the active pixel rectangle"
-          container="array" typedef="rectangle" hwlevel="legacy">
-            <array>
-              <size>4</size>
-            </array>
-            <description>
-            The area of the image sensor which corresponds to active pixels after any geometric
-            distortion correction has been applied.
-            </description>
-            <units>Pixel coordinates on the image sensor</units>
-            <details>
-            This is the rectangle representing the size of the active region of the sensor (i.e.
-            the region that actually receives light from the scene) after any geometric correction
-            has been applied, and should be treated as the maximum size in pixels of any of the
-            image output formats aside from the raw formats.
-
-            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
-            the full pixel array, and the size of the full pixel array is given by
-            android.sensor.info.pixelArraySize.
-
-            The data representation is int[4], which maps to (left, top, width, height).
-
-            The coordinate system for most other keys that list pixel coordinates, including
-            android.scaler.cropRegion, is defined relative to the active array rectangle given in
-            this field, with `(0, 0)` being the top-left of this rectangle.
-
-            The active array may be smaller than the full pixel array, since the full array may
-            include black calibration pixels or other inactive regions, and geometric correction
-            resulting in scaling or cropping may have been applied.
-            </details>
-            <hal_details>
-            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
-            &amp;gt;= `(0,0)`.
-            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
-            </hal_details>
-            <tag id="RAW" />
-          </entry>
-          <entry name="sensitivityRange" type="int32" visibility="public"
-          type_notes="Range of supported sensitivities"
-          container="array" typedef="rangeInt"
-          hwlevel="full">
-            <array>
-              <size>2</size>
-            </array>
-            <description>Range of sensitivities for android.sensor.sensitivity supported by this
-            camera device.</description>
-            <range>Min &lt;= 100, Max &amp;gt;= 800</range>
-            <details>
-              The values are the standard ISO sensitivity values,
-              as defined in ISO 12232:2006.
-            </details>
-
-            <tag id="BC" />
-            <tag id="V1" />
-          </entry>
-          <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true"
-            hwlevel="full">
-            <enum>
-              <value>RGGB</value>
-              <value>GRBG</value>
-              <value>GBRG</value>
-              <value>BGGR</value>
-              <value>RGB
-              <notes>Sensor is not Bayer; output has 3 16-bit
-              values for each pixel, instead of just 1 16-bit value
-              per pixel.</notes></value>
-            </enum>
-            <description>The arrangement of color filters on sensor;
-            represents the colors in the top-left 2x2 section of
-            the sensor, in reading order.</description>
-            <tag id="RAW" />
-          </entry>
-          <entry name="exposureTimeRange" type="int64" visibility="public"
-                 type_notes="nanoseconds" container="array" typedef="rangeLong"
-                 hwlevel="full">
-            <array>
-              <size>2</size>
-            </array>
-            <description>The range of image exposure times for android.sensor.exposureTime supported
-            by this camera device.
-            </description>
-            <units>Nanoseconds</units>
-            <range>The minimum exposure time will be less than 100 us. For FULL
-            capability devices (android.info.supportedHardwareLevel == FULL),
-            the maximum exposure time will be greater than 100ms.</range>
-            <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL),
-            The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least
-            100ms.
-            </hal_details>
-            <tag id="V1" />
-          </entry>
-          <entry name="maxFrameDuration" type="int64" visibility="public"
-                 hwlevel="full">
-            <description>The maximum possible frame duration (minimum frame rate) for
-            android.sensor.frameDuration that is supported by this camera device.</description>
-            <units>Nanoseconds</units>
-            <range>For FULL capability devices
-            (android.info.supportedHardwareLevel == FULL), at least 100ms.
-            </range>
-            <details>Attempting to use frame durations beyond the maximum will result in the frame
-            duration being clipped to the maximum. See that control for a full definition of frame
-            durations.
-
-            Refer to {@link
-            ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}
-            for the minimum frame duration values.
-            </details>
-            <hal_details>
-            For FULL capability devices (android.info.supportedHardwareLevel == FULL),
-            The maximum of the range SHOULD be at least
-            1 second (1e9), MUST be at least 100ms (100e6).
-
-            android.sensor.info.maxFrameDuration must be greater or
-            equal to the android.sensor.info.exposureTimeRange max
-            value (since exposure time overrides frame duration).
-
-            Available minimum frame durations for JPEG must be no greater
-            than that of the YUV_420_888/IMPLEMENTATION_DEFINED
-            minimum frame durations (for that respective size).
-
-            Since JPEG processing is considered offline and can take longer than
-            a single uncompressed capture, refer to
-            android.scaler.availableStallDurations
-            for details about encoding this scenario.
-            </hal_details>
-            <tag id="V1" />
-          </entry>
-          <entry name="physicalSize" type="float" visibility="public"
-          type_notes="width x height"
-          container="array" typedef="sizeF" hwlevel="legacy">
-            <array>
-              <size>2</size>
-            </array>
-            <description>The physical dimensions of the full pixel
-            array.</description>
-            <units>Millimeters</units>
-            <details>This is the physical size of the sensor pixel
-            array defined by android.sensor.info.pixelArraySize.
-            </details>
-            <hal_details>Needed for FOV calculation for old API</hal_details>
-            <tag id="V1" />
-            <tag id="BC" />
-          </entry>
-          <entry name="pixelArraySize" type="int32" visibility="public"
-          container="array" typedef="size" hwlevel="legacy">
-            <array>
-              <size>2</size>
-            </array>
-            <description>Dimensions of the full pixel array, possibly
-            including black calibration pixels.</description>
-            <units>Pixels</units>
-            <details>The pixel count of the full pixel array of the image sensor, which covers
-            android.sensor.info.physicalSize area.  This represents the full pixel dimensions of
-            the raw buffers produced by this sensor.
-
-            If a camera device supports raw sensor formats, either this or
-            android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw
-            output formats listed in ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS (this depends on
-            whether or not the image sensor returns buffers containing pixels that are not
-            part of the active array region for blacklevel calibration or other purposes).
-
-            Some parts of the full pixel array may not receive light from the scene,
-            or be otherwise inactive.  The android.sensor.info.preCorrectionActiveArraySize key
-            defines the rectangle of active pixels that will be included in processed image
-            formats.
-            </details>
-            <tag id="RAW" />
-            <tag id="BC" />
-          </entry>
-          <entry name="whiteLevel" type="int32" visibility="public">
-            <description>
-            Maximum raw value output by sensor.
-            </description>
-            <range>&amp;gt; 255 (8-bit output)</range>
-            <details>
-            This specifies the fully-saturated encoding level for the raw
-            sample values from the sensor.  This is typically caused by the
-            sensor becoming highly non-linear or clipping. The minimum for
-            each channel is specified by the offset in the
-            android.sensor.blackLevelPattern key.
-
-            The white level is typically determined either by sensor bit depth
-            (8-14 bits is expected), or by the point where the sensor response
-            becomes too non-linear to be useful.  The default value for this is
-            maximum representable value for a 16-bit raw sample (2^16 - 1).
-
-            The white level values of captured images may vary for different
-            capture settings (e.g., android.sensor.sensitivity). This key
-            represents a coarse approximation for such case. It is recommended
-            to use android.sensor.dynamicWhiteLevel for captures when supported
-            by the camera device, which provides more accurate white level values.
-            </details>
-            <hal_details>
-            The full bit depth of the sensor must be available in the raw data,
-            so the value for linear sensors should not be significantly lower
-            than maximum raw value supported, i.e. 2^(sensor bits per pixel).
-            </hal_details>
-            <tag id="RAW" />
-          </entry>
-          <entry name="timestampSource" type="byte" visibility="public"
-                 enum="true" hwlevel="legacy">
-            <enum>
-              <value>UNKNOWN
-                <notes>
-                Timestamps from android.sensor.timestamp are in nanoseconds and monotonic,
-                but can not be compared to timestamps from other subsystems
-                (e.g. accelerometer, gyro etc.), or other instances of the same or different
-                camera devices in the same system. Timestamps between streams and results for
-                a single camera instance are comparable, and the timestamps for all buffers
-                and the result metadata generated by a single capture are identical.
-                </notes>
-              </value>
-              <value>REALTIME
-                <notes>
-                Timestamps from android.sensor.timestamp are in the same timebase as
-                [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
-                (or CLOCK_BOOTTIME), and they can be compared to other timestamps using that base.
-                </notes>
-              </value>
-            </enum>
-            <description>The time base source for sensor capture start timestamps.</description>
-            <details>
-            The timestamps provided for captures are always in nanoseconds and monotonic, but
-            may not be based on a time source that can be compared to other system time sources.
-
-            This characteristic defines the source for the timestamps, and therefore whether they
-            can be compared against other system time sources/timestamps.
-            </details>
-          <tag id="V1" />
-        </entry>
-        <entry name="lensShadingApplied" type="byte" visibility="public" enum="true"
-               typedef="boolean">
-          <enum>
-            <value>FALSE</value>
-            <value>TRUE</value>
-          </enum>
-          <description>Whether the RAW images output from this camera device are subject to
-          lens shading correction.</description>
-          <details>
-          If TRUE, all images produced by the camera device in the RAW image formats will
-          have lens shading correction already applied to it. If FALSE, the images will
-          not be adjusted for lens shading correction.
-          See android.request.maxNumOutputRaw for a list of RAW image formats.
-
-          This key will be `null` for all devices that do not report this information.
-          Devices with RAW capability will always report this information in this key.
-          </details>
-        </entry>
-        <entry name="preCorrectionActiveArraySize" type="int32" visibility="public"
-          type_notes="Four ints defining the active pixel rectangle" container="array"
-          typedef="rectangle" hwlevel="legacy">
-            <array>
-              <size>4</size>
-            </array>
-            <description>
-            The area of the image sensor which corresponds to active pixels prior to the
-            application of any geometric distortion correction.
-            </description>
-            <units>Pixel coordinates on the image sensor</units>
-            <details>
-            The data representation is int[4], which maps to (left, top, width, height).
-
-            This is the rectangle representing the size of the active region of the sensor (i.e.
-            the region that actually receives light from the scene) before any geometric correction
-            has been applied, and should be treated as the active region rectangle for any of the
-            raw formats.  All metadata associated with raw processing (e.g. the lens shading
-            correction map, and radial distortion fields) treats the top, left of this rectangle as
-            the origin, (0,0).
-
-            The size of this region determines the maximum field of view and the maximum number of
-            pixels that an image from this sensor can contain, prior to the application of
-            geometric distortion correction. The effective maximum pixel dimensions of a
-            post-distortion-corrected image is given by the android.sensor.info.activeArraySize
-            field, and the effective maximum field of view for a post-distortion-corrected image
-            can be calculated by applying the geometric distortion correction fields to this
-            rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize.
-
-            E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the
-            dimensions in android.sensor.info.activeArraySize given the position of a pixel,
-            (x', y'), in the raw pixel array with dimensions give in
-            android.sensor.info.pixelArraySize:
-
-            1. Choose a pixel (x', y') within the active array region of the raw buffer given in
-            android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered
-            to be outside of the FOV, and will not be shown in the processed output image.
-            1. Apply geometric distortion correction to get the post-distortion pixel coordinate,
-            (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
-            buffers is defined relative to the top, left of the
-            android.sensor.info.preCorrectionActiveArraySize rectangle.
-            1. If the resulting corrected pixel coordinate is within the region given in
-            android.sensor.info.activeArraySize, then the position of this pixel in the
-            processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`,
-            when the top, left coordinate of that buffer is treated as (0, 0).
-
-            Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize
-            is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100),
-            android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion
-            correction doesn't change the pixel coordinate, the resulting pixel selected in
-            pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer
-            with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5)
-            relative to the top,left of post-processed YUV output buffer with dimensions given in
-            android.sensor.info.activeArraySize.
-
-            The currently supported fields that correct for geometric distortion are:
-
-            1. android.lens.radialDistortion.
-
-            If all of the geometric distortion fields are no-ops, this rectangle will be the same
-            as the post-distortion-corrected rectangle given in
-            android.sensor.info.activeArraySize.
-
-            This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
-            the full pixel array, and the size of the full pixel array is given by
-            android.sensor.info.pixelArraySize.
-
-            The pre-correction active array may be smaller than the full pixel array, since the
-            full array may include black calibration pixels or other inactive regions.
-            </details>
-            <hal_details>
-            This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be
-            &amp;gt;= `(0,0)`.
-            The `(width, height)` must be &amp;lt;= `android.sensor.info.pixelArraySize`.
-
-            If omitted by the HAL implementation, the camera framework will assume that this is
-            the same as the post-correction active array region given in
-            android.sensor.info.activeArraySize.
-            </hal_details>
-            <tag id="RAW" />
-          </entry>
-        </namespace>
-        <entry name="referenceIlluminant1" type="byte" visibility="public"
-               enum="true">
-          <enum>
-            <value id="1">DAYLIGHT</value>
-            <value id="2">FLUORESCENT</value>
-            <value id="3">TUNGSTEN
-              <notes>Incandescent light</notes>
-            </value>
-            <value id="4">FLASH</value>
-            <value id="9">FINE_WEATHER</value>
-            <value id="10">CLOUDY_WEATHER</value>
-            <value id="11">SHADE</value>
-            <value id="12">DAYLIGHT_FLUORESCENT
-              <notes>D 5700 - 7100K</notes>
-            </value>
-            <value id="13">DAY_WHITE_FLUORESCENT
-              <notes>N 4600 - 5400K</notes>
-            </value>
-            <value id="14">COOL_WHITE_FLUORESCENT
-              <notes>W 3900 - 4500K</notes>
-            </value>
-            <value id="15">WHITE_FLUORESCENT
-              <notes>WW 3200 - 3700K</notes>
-            </value>
-            <value id="17">STANDARD_A</value>
-            <value id="18">STANDARD_B</value>
-            <value id="19">STANDARD_C</value>
-            <value id="20">D55</value>
-            <value id="21">D65</value>
-            <value id="22">D75</value>
-            <value id="23">D50</value>
-            <value id="24">ISO_STUDIO_TUNGSTEN</value>
-          </enum>
-          <description>
-          The standard reference illuminant used as the scene light source when
-          calculating the android.sensor.colorTransform1,
-          android.sensor.calibrationTransform1, and
-          android.sensor.forwardMatrix1 matrices.
-          </description>
-          <details>
-          The values in this key correspond to the values defined for the
-          EXIF LightSource tag. These illuminants are standard light sources
-          that are often used in calibrating camera devices.
-
-          If this key is present, then android.sensor.colorTransform1,
-          android.sensor.calibrationTransform1, and
-          android.sensor.forwardMatrix1 will also be present.
-
-          Some devices may choose to provide a second set of calibration
-          information for improved quality, including
-          android.sensor.referenceIlluminant2 and its corresponding matrices.
-          </details>
-          <hal_details>
-          The first reference illuminant (android.sensor.referenceIlluminant1)
-          and corresponding matrices must be present to support the RAW capability
-          and DNG output.
-
-          When producing raw images with a color profile that has only been
-          calibrated against a single light source, it is valid to omit
-          android.sensor.referenceIlluminant2 along with the
-          android.sensor.colorTransform2, android.sensor.calibrationTransform2,
-          and android.sensor.forwardMatrix2 matrices.
-
-          If only android.sensor.referenceIlluminant1 is included, it should be
-          chosen so that it is representative of typical scene lighting.  In
-          general, D50 or DAYLIGHT will be chosen for this case.
-
-          If both android.sensor.referenceIlluminant1 and
-          android.sensor.referenceIlluminant2 are included, they should be
-          chosen to represent the typical range of scene lighting conditions.
-          In general, low color temperature illuminant such as Standard-A will
-          be chosen for the first reference illuminant and a higher color
-          temperature illuminant such as D65 will be chosen for the second
-          reference illuminant.
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="referenceIlluminant2" type="byte" visibility="public">
-          <description>
-          The standard reference illuminant used as the scene light source when
-          calculating the android.sensor.colorTransform2,
-          android.sensor.calibrationTransform2, and
-          android.sensor.forwardMatrix2 matrices.
-          </description>
-          <range>Any value listed in android.sensor.referenceIlluminant1</range>
-          <details>
-          See android.sensor.referenceIlluminant1 for more details.
-
-          If this key is present, then android.sensor.colorTransform2,
-          android.sensor.calibrationTransform2, and
-          android.sensor.forwardMatrix2 will also be present.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="calibrationTransform1" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A per-device calibration transform matrix that maps from the
-          reference sensor colorspace to the actual device sensor colorspace.
-          </description>
-          <details>
-          This matrix is used to correct for per-device variations in the
-          sensor colorspace, and is used for processing raw buffer data.
-
-          The matrix is expressed as a 3x3 matrix in row-major-order, and
-          contains a per-device calibration transform that maps colors
-          from reference sensor color space (i.e. the "golden module"
-          colorspace) into this camera device's native sensor color
-          space under the first reference illuminant
-          (android.sensor.referenceIlluminant1).
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="calibrationTransform2" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A per-device calibration transform matrix that maps from the
-          reference sensor colorspace to the actual device sensor colorspace
-          (this is the colorspace of the raw buffer data).
-          </description>
-          <details>
-          This matrix is used to correct for per-device variations in the
-          sensor colorspace, and is used for processing raw buffer data.
-
-          The matrix is expressed as a 3x3 matrix in row-major-order, and
-          contains a per-device calibration transform that maps colors
-          from reference sensor color space (i.e. the "golden module"
-          colorspace) into this camera device's native sensor color
-          space under the second reference illuminant
-          (android.sensor.referenceIlluminant2).
-
-          This matrix will only be present if the second reference
-          illuminant is present.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="colorTransform1" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A matrix that transforms color values from CIE XYZ color space to
-          reference sensor color space.
-          </description>
-          <details>
-          This matrix is used to convert from the standard CIE XYZ color
-          space to the reference sensor colorspace, and is used when processing
-          raw buffer data.
-
-          The matrix is expressed as a 3x3 matrix in row-major-order, and
-          contains a color transform matrix that maps colors from the CIE
-          XYZ color space to the reference sensor color space (i.e. the
-          "golden module" colorspace) under the first reference illuminant
-          (android.sensor.referenceIlluminant1).
-
-          The white points chosen in both the reference sensor color space
-          and the CIE XYZ colorspace when calculating this transform will
-          match the standard white point for the first reference illuminant
-          (i.e. no chromatic adaptation will be applied by this transform).
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="colorTransform2" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A matrix that transforms color values from CIE XYZ color space to
-          reference sensor color space.
-          </description>
-          <details>
-          This matrix is used to convert from the standard CIE XYZ color
-          space to the reference sensor colorspace, and is used when processing
-          raw buffer data.
-
-          The matrix is expressed as a 3x3 matrix in row-major-order, and
-          contains a color transform matrix that maps colors from the CIE
-          XYZ color space to the reference sensor color space (i.e. the
-          "golden module" colorspace) under the second reference illuminant
-          (android.sensor.referenceIlluminant2).
-
-          The white points chosen in both the reference sensor color space
-          and the CIE XYZ colorspace when calculating this transform will
-          match the standard white point for the second reference illuminant
-          (i.e. no chromatic adaptation will be applied by this transform).
-
-          This matrix will only be present if the second reference
-          illuminant is present.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="forwardMatrix1" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A matrix that transforms white balanced camera colors from the reference
-          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
-          </description>
-          <details>
-          This matrix is used to convert to the standard CIE XYZ colorspace, and
-          is used when processing raw buffer data.
-
-          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
-          a color transform matrix that maps white balanced colors from the
-          reference sensor color space to the CIE XYZ color space with a D50 white
-          point.
-
-          Under the first reference illuminant (android.sensor.referenceIlluminant1)
-          this matrix is chosen so that the standard white point for this reference
-          illuminant in the reference sensor colorspace is mapped to D50 in the
-          CIE XYZ colorspace.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="forwardMatrix2" type="rational"
-        visibility="public" optional="true"
-        type_notes="3x3 matrix in row-major-order" container="array"
-        typedef="colorSpaceTransform">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>
-          A matrix that transforms white balanced camera colors from the reference
-          sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.
-          </description>
-          <details>
-          This matrix is used to convert to the standard CIE XYZ colorspace, and
-          is used when processing raw buffer data.
-
-          This matrix is expressed as a 3x3 matrix in row-major-order, and contains
-          a color transform matrix that maps white balanced colors from the
-          reference sensor color space to the CIE XYZ color space with a D50 white
-          point.
-
-          Under the second reference illuminant (android.sensor.referenceIlluminant2)
-          this matrix is chosen so that the standard white point for this reference
-          illuminant in the reference sensor colorspace is mapped to D50 in the
-          CIE XYZ colorspace.
-
-          This matrix will only be present if the second reference
-          illuminant is present.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="baseGainFactor" type="rational"
-        optional="true">
-          <description>Gain factor from electrons to raw units when
-          ISO=100</description>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="blackLevelPattern" type="int32" visibility="public"
-        optional="true" type_notes="2x2 raw count block" container="array"
-        typedef="blackLevelPattern">
-          <array>
-            <size>4</size>
-          </array>
-          <description>
-          A fixed black level offset for each of the color filter arrangement
-          (CFA) mosaic channels.
-          </description>
-          <range>&amp;gt;= 0 for each.</range>
-          <details>
-          This key specifies the zero light value for each of the CFA mosaic
-          channels in the camera sensor.  The maximal value output by the
-          sensor is represented by the value in android.sensor.info.whiteLevel.
-
-          The values are given in the same order as channels listed for the CFA
-          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
-          nth value given corresponds to the black level offset for the nth
-          color channel listed in the CFA.
-
-          The black level values of captured images may vary for different
-          capture settings (e.g., android.sensor.sensitivity). This key
-          represents a coarse approximation for such case. It is recommended to
-          use android.sensor.dynamicBlackLevel or use pixels from
-          android.sensor.opticalBlackRegions directly for captures when
-          supported by the camera device, which provides more accurate black
-          level values. For raw capture in particular, it is recommended to use
-          pixels from android.sensor.opticalBlackRegions to calculate black
-          level values for each frame.
-          </details>
-          <hal_details>
-          The values are given in row-column scan order, with the first value
-          corresponding to the element of the CFA in row=0, column=0.
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="maxAnalogSensitivity" type="int32" visibility="public"
-               optional="true" hwlevel="full">
-          <description>Maximum sensitivity that is implemented
-          purely through analog gain.</description>
-          <details>For android.sensor.sensitivity values less than or
-          equal to this, all applied gain must be analog. For
-          values above this, the gain applied can be a mix of analog and
-          digital.</details>
-          <tag id="V1" />
-          <tag id="FULL" />
-        </entry>
-        <entry name="orientation" type="int32" visibility="public"
-               hwlevel="legacy">
-          <description>Clockwise angle through which the output image needs to be rotated to be
-          upright on the device screen in its native orientation.
-          </description>
-          <units>Degrees of clockwise rotation; always a multiple of
-          90</units>
-          <range>0, 90, 180, 270</range>
-          <details>
-          Also defines the direction of rolling shutter readout, which is from top to bottom in
-          the sensor's coordinate system.
-          </details>
-          <tag id="BC" />
-        </entry>
-        <entry name="profileHueSatMapDimensions" type="int32"
-        visibility="system" optional="true"
-        type_notes="Number of samples for hue, saturation, and value"
-        container="array">
-          <array>
-            <size>3</size>
-          </array>
-          <description>
-          The number of input samples for each dimension of
-          android.sensor.profileHueSatMap.
-          </description>
-          <range>
-          Hue &amp;gt;= 1,
-          Saturation &amp;gt;= 2,
-          Value &amp;gt;= 1
-          </range>
-          <details>
-          The number of input samples for the hue, saturation, and value
-          dimension of android.sensor.profileHueSatMap. The order of the
-          dimensions given is hue, saturation, value; where hue is the 0th
-          element.
-          </details>
-          <tag id="RAW" />
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.sensor.exposureTime" kind="controls">
-        </clone>
-        <clone entry="android.sensor.frameDuration"
-        kind="controls"></clone>
-        <clone entry="android.sensor.sensitivity" kind="controls">
-        </clone>
-        <entry name="timestamp" type="int64" visibility="public"
-               hwlevel="legacy">
-          <description>Time at start of exposure of first
-          row of the image sensor active array, in nanoseconds.</description>
-          <units>Nanoseconds</units>
-          <range>&amp;gt; 0</range>
-          <details>The timestamps are also included in all image
-          buffers produced for the same capture, and will be identical
-          on all the outputs.
-
-          When android.sensor.info.timestampSource `==` UNKNOWN,
-          the timestamps measure time since an unspecified starting point,
-          and are monotonically increasing. They can be compared with the
-          timestamps for other captures from the same camera device, but are
-          not guaranteed to be comparable to any other time source.
-
-          When android.sensor.info.timestampSource `==` REALTIME, the
-          timestamps measure time in the same timebase as
-          [elapsedRealtimeNanos](https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos)
-          (or CLOCK_BOOTTIME), and they can
-          be compared to other timestamps from other subsystems that
-          are using that base.
-
-          For reprocessing, the timestamp will match the start of exposure of
-          the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
-          timestamp} in the TotalCaptureResult that was used to create the
-          reprocess capture request.
-          </details>
-          <hal_details>
-          All timestamps must be in reference to the kernel's
-          CLOCK_BOOTTIME monotonic clock, which properly accounts for
-          time spent asleep. This allows for synchronization with
-          sensors that continue to operate while the system is
-          otherwise asleep.
-
-          If android.sensor.info.timestampSource `==` REALTIME,
-          The timestamp must be synchronized with the timestamps from other
-          sensor subsystems that are using the same timebase.
-
-          For reprocessing, the input image's start of exposure can be looked up
-          with android.sensor.timestamp from the metadata included in the
-          capture request.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="temperature" type="float"
-        optional="true">
-          <description>The temperature of the sensor, sampled at the time
-          exposure began for this frame.
-
-          The thermal diode being queried should be inside the sensor PCB, or
-          somewhere close to it.
-          </description>
-
-          <units>Celsius</units>
-          <range>Optional. This value is missing if no temperature is available.</range>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="neutralColorPoint" type="rational" visibility="public"
-        optional="true" container="array">
-          <array>
-            <size>3</size>
-          </array>
-          <description>
-          The estimated camera neutral color in the native sensor colorspace at
-          the time of capture.
-          </description>
-          <details>
-          This value gives the neutral color point encoded as an RGB value in the
-          native sensor color space.  The neutral color point indicates the
-          currently estimated white point of the scene illumination.  It can be
-          used to interpolate between the provided color transforms when
-          processing raw sensor data.
-
-          The order of the values is R, G, B; where R is in the lowest index.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="noiseProfile" type="double" visibility="public"
-        optional="true" type_notes="Pairs of noise model coefficients"
-        container="array" typedef="pairDoubleDouble">
-          <array>
-            <size>2</size>
-            <size>CFA Channels</size>
-          </array>
-          <description>
-          Noise model coefficients for each CFA mosaic channel.
-          </description>
-          <details>
-          This key contains two noise model coefficients for each CFA channel
-          corresponding to the sensor amplification (S) and sensor readout
-          noise (O).  These are given as pairs of coefficients for each channel
-          in the same order as channels listed for the CFA layout key
-          (see android.sensor.info.colorFilterArrangement).  This is
-          represented as an array of Pair&amp;lt;Double, Double&amp;gt;, where
-          the first member of the Pair at index n is the S coefficient and the
-          second member is the O coefficient for the nth color channel in the CFA.
-
-          These coefficients are used in a two parameter noise model to describe
-          the amount of noise present in the image for each CFA channel.  The
-          noise model used here is:
-
-          N(x) = sqrt(Sx + O)
-
-          Where x represents the recorded signal of a CFA channel normalized to
-          the range [0, 1], and S and O are the noise model coefficients for
-          that channel.
-
-          A more detailed description of the noise model can be found in the
-          Adobe DNG specification for the NoiseProfile tag.
-          </details>
-          <hal_details>
-          For a CFA layout of RGGB, the list of coefficients would be given as
-          an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients
-          for the red channel, S1 and O1 are the coefficients for the first green
-          channel, etc.
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="profileHueSatMap" type="float"
-        visibility="system" optional="true"
-        type_notes="Mapping for hue, saturation, and value"
-        container="array">
-          <array>
-            <size>hue_samples</size>
-            <size>saturation_samples</size>
-            <size>value_samples</size>
-            <size>3</size>
-          </array>
-          <description>
-          A mapping containing a hue shift, saturation scale, and value scale
-          for each pixel.
-          </description>
-          <units>
-          The hue shift is given in degrees; saturation and value scale factors are
-          unitless and are between 0 and 1 inclusive
-          </units>
-          <details>
-          hue_samples, saturation_samples, and value_samples are given in
-          android.sensor.profileHueSatMapDimensions.
-
-          Each entry of this map contains three floats corresponding to the
-          hue shift, saturation scale, and value scale, respectively; where the
-          hue shift has the lowest index. The map entries are stored in the key
-          in nested loop order, with the value divisions in the outer loop, the
-          hue divisions in the middle loop, and the saturation divisions in the
-          inner loop. All zero input saturation entries are required to have a
-          value scale factor of 1.0.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="profileToneCurve" type="float"
-        visibility="system" optional="true"
-        type_notes="Samples defining a spline for a tone-mapping curve"
-        container="array">
-          <array>
-            <size>samples</size>
-            <size>2</size>
-          </array>
-          <description>
-          A list of x,y samples defining a tone-mapping curve for gamma adjustment.
-          </description>
-          <range>
-          Each sample has an input range of `[0, 1]` and an output range of
-          `[0, 1]`.  The first sample is required to be `(0, 0)`, and the last
-          sample is required to be `(1, 1)`.
-          </range>
-          <details>
-          This key contains a default tone curve that can be applied while
-          processing the image as a starting point for user adjustments.
-          The curve is specified as a list of value pairs in linear gamma.
-          The curve is interpolated using a cubic spline.
-          </details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="greenSplit" type="float" visibility="public" optional="true">
-          <description>
-          The worst-case divergence between Bayer green channels.
-          </description>
-          <range>
-          &amp;gt;= 0
-          </range>
-          <details>
-          This value is an estimate of the worst case split between the
-          Bayer green channels in the red and blue rows in the sensor color
-          filter array.
-
-          The green split is calculated as follows:
-
-          1. A 5x5 pixel (or larger) window W within the active sensor array is
-          chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
-          mosaic channels (R, Gr, Gb, B).  The location and size of the window
-          chosen is implementation defined, and should be chosen to provide a
-          green split estimate that is both representative of the entire image
-          for this camera sensor, and can be calculated quickly.
-          1. The arithmetic mean of the green channels from the red
-          rows (mean_Gr) within W is computed.
-          1. The arithmetic mean of the green channels from the blue
-          rows (mean_Gb) within W is computed.
-          1. The maximum ratio R of the two means is computed as follows:
-          `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))`
-
-          The ratio R is the green split divergence reported for this property,
-          which represents how much the green channels differ in the mosaic
-          pattern.  This value is typically used to determine the treatment of
-          the green mosaic channels when demosaicing.
-
-          The green split value can be roughly interpreted as follows:
-
-          * R &amp;lt; 1.03 is a negligible split (&amp;lt;3% divergence).
-          * 1.03 &amp;lt;= R &amp;lt;= 1.20 will require some software
-          correction to avoid demosaic errors (3-20% divergence).
-          * R &amp;gt; 1.20 will require strong software correction to produce
-          a usable image (&amp;gt;20% divergence).
-          </details>
-          <hal_details>
-          The green split given may be a static value based on prior
-          characterization of the camera sensor using the green split
-          calculation method given here over a large, representative, sample
-          set of images.  Other methods of calculation that produce equivalent
-          results, and can be interpreted in the same manner, may be used.
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-      </dynamic>
-      <controls>
-        <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array">
-          <array>
-            <size>4</size>
-          </array>
-          <description>
-            A pixel `[R, G_even, G_odd, B]` that supplies the test pattern
-            when android.sensor.testPatternMode is SOLID_COLOR.
-          </description>
-          <details>
-          Each color channel is treated as an unsigned 32-bit integer.
-          The camera device then uses the most significant X bits
-          that correspond to how many bits are in its Bayer raw sensor
-          output.
-
-          For example, a sensor with RAW10 Bayer output would use the
-          10 most significant bits from each color channel.
-          </details>
-          <hal_details>
-          </hal_details>
-        </entry>
-        <entry name="testPatternMode" type="int32" visibility="public" optional="true"
-          enum="true">
-          <enum>
-            <value>OFF
-              <notes>No test pattern mode is used, and the camera
-              device returns captures from the image sensor.
-
-              This is the default if the key is not set.</notes>
-            </value>
-            <value>SOLID_COLOR
-              <notes>
-              Each pixel in `[R, G_even, G_odd, B]` is replaced by its
-              respective color channel provided in
-              android.sensor.testPatternData.
-
-              For example:
-
-                  android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
-
-              All green pixels are 100% green. All red/blue pixels are black.
-
-                  android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
-
-              All red pixels are 100% red. Only the odd green pixels
-              are 100% green. All blue pixels are 100% black.
-              </notes>
-            </value>
-            <value>COLOR_BARS
-              <notes>
-              All pixel data is replaced with an 8-bar color pattern.
-
-              The vertical bars (left-to-right) are as follows:
-
-              * 100% white
-              * yellow
-              * cyan
-              * green
-              * magenta
-              * red
-              * blue
-              * black
-
-              In general the image would look like the following:
-
-                 W Y C G M R B K
-                 W Y C G M R B K
-                 W Y C G M R B K
-                 W Y C G M R B K
-                 W Y C G M R B K
-                 . . . . . . . .
-                 . . . . . . . .
-                 . . . . . . . .
-
-                 (B = Blue, K = Black)
-
-             Each bar should take up 1/8 of the sensor pixel array width.
-             When this is not possible, the bar size should be rounded
-             down to the nearest integer and the pattern can repeat
-             on the right side.
-
-             Each bar's height must always take up the full sensor
-             pixel array height.
-
-             Each pixel in this test pattern must be set to either
-             0% intensity or 100% intensity.
-             </notes>
-            </value>
-            <value>COLOR_BARS_FADE_TO_GRAY
-              <notes>
-              The test pattern is similar to COLOR_BARS, except that
-              each bar should start at its specified color at the top,
-              and fade to gray at the bottom.
-
-              Furthermore each bar is further subdivided into a left and
-              right half. The left half should have a smooth gradient,
-              and the right half should have a quantized gradient.
-
-              In particular, the right half should consist of blocks of the
-              same color for 1/16th active sensor pixel array width.
-
-              The least significant bits in the quantized gradient should
-              be copied from the most significant bits of the smooth gradient.
-
-              The height of each bar should always be a multiple of 128.
-              When this is not the case, the pattern should repeat at the bottom
-              of the image.
-              </notes>
-            </value>
-            <value>PN9
-              <notes>
-              All pixel data is replaced by a pseudo-random sequence
-              generated from a PN9 512-bit sequence (typically implemented
-              in hardware with a linear feedback shift register).
-
-              The generator should be reset at the beginning of each frame,
-              and thus each subsequent raw frame with this test pattern should
-              be exactly the same as the last.
-              </notes>
-            </value>
-            <value id="256">CUSTOM1
-              <notes>The first custom test pattern. All custom patterns that are
-              available only on this camera device are at least this numeric
-              value.
-
-              All of the custom test patterns will be static
-              (that is the raw image must not vary from frame to frame).
-              </notes>
-            </value>
-          </enum>
-          <description>When enabled, the sensor sends a test pattern instead of
-          doing a real exposure from the camera.
-          </description>
-          <range>android.sensor.availableTestPatternModes</range>
-          <details>
-          When a test pattern is enabled, all manual sensor controls specified
-          by android.sensor.* will be ignored. All other controls should
-          work as normal.
-
-          For example, if manual flash is enabled, flash firing should still
-          occur (and that the test pattern remain unmodified, since the flash
-          would not actually affect it).
-
-          Defaults to OFF.
-          </details>
-          <hal_details>
-          All test patterns are specified in the Bayer domain.
-
-          The HAL may choose to substitute test patterns from the sensor
-          with test patterns from on-device memory. In that case, it should be
-          indistinguishable to the ISP whether the data came from the
-          sensor interconnect bus (such as CSI2) or memory.
-          </hal_details>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.sensor.testPatternData" kind="controls">
-        </clone>
-        <clone entry="android.sensor.testPatternMode" kind="controls">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true"
-          type_notes="list of enums" container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <description>List of sensor test pattern modes for android.sensor.testPatternMode
-          supported by this camera device.
-          </description>
-          <range>Any value listed in android.sensor.testPatternMode</range>
-          <details>
-            Defaults to OFF, and always includes OFF if defined.
-          </details>
-          <hal_details>
-            All custom modes must be >= CUSTOM1.
-          </hal_details>
-        </entry>
-      </static>
-      <dynamic>
-        <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited">
-          <description>Duration between the start of first row exposure
-          and the start of last row exposure.</description>
-          <units>Nanoseconds</units>
-          <range> &amp;gt;= 0 and &amp;lt;
-          {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range>
-          <details>
-          This is the exposure time skew between the first and last
-          row exposure start times. The first row and the last row are
-          the first and last rows inside of the
-          android.sensor.info.activeArraySize.
-
-          For typical camera sensors that use rolling shutters, this is also equivalent
-          to the frame readout time.
-          </details>
-          <hal_details>
-          The HAL must report `0` if the sensor is using global shutter, where all pixels begin
-          exposure at the same time.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-      </dynamic>
-      <static>
-        <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true"
-          container="array" typedef="rectangle">
-          <array>
-            <size>4</size>
-            <size>num_regions</size>
-          </array>
-          <description>List of disjoint rectangles indicating the sensor
-          optically shielded black pixel regions.
-          </description>
-          <details>
-            In most camera sensors, the active array is surrounded by some
-            optically shielded pixel areas. By blocking light, these pixels
-            provide a reliable black reference for black level compensation
-            in active array region.
-
-            The data representation is int[4], which maps to (left, top, width, height).
-
-            This key provides a list of disjoint rectangles specifying the
-            regions of optically shielded (with metal shield) black pixel
-            regions if the camera device is capable of reading out these black
-            pixels in the output raw images. In comparison to the fixed black
-            level values reported by android.sensor.blackLevelPattern, this key
-            may provide a more accurate way for the application to calculate
-            black level of each captured raw images.
-
-            When this key is reported, the android.sensor.dynamicBlackLevel and
-            android.sensor.dynamicWhiteLevel will also be reported.
-          </details>
-          <hal_details>
-            This array contains (xmin, ymin, width, height). The (xmin, ymin)
-            must be &amp;gt;= (0,0) and &amp;lt;=
-            android.sensor.info.pixelArraySize. The (width, height) must be
-            &amp;lt;= android.sensor.info.pixelArraySize. Each region must be
-            outside the region reported by
-            android.sensor.info.preCorrectionActiveArraySize.
-
-            The HAL must report minimal number of disjoint regions for the
-            optically shielded black pixel regions. For example, if a region can
-            be covered by one rectangle, the HAL must not split this region into
-            multiple rectangles.
-          </hal_details>
-        </entry>
-      </static>
-      <dynamic>
-        <entry name="dynamicBlackLevel" type="float" visibility="public"
-        optional="true" type_notes="2x2 raw count block" container="array">
-          <array>
-            <size>4</size>
-          </array>
-          <description>
-          A per-frame dynamic black level offset for each of the color filter
-          arrangement (CFA) mosaic channels.
-          </description>
-          <range>&amp;gt;= 0 for each.</range>
-          <details>
-          Camera sensor black levels may vary dramatically for different
-          capture settings (e.g. android.sensor.sensitivity). The fixed black
-          level reported by android.sensor.blackLevelPattern may be too
-          inaccurate to represent the actual value on a per-frame basis. The
-          camera device internal pipeline relies on reliable black level values
-          to process the raw images appropriately. To get the best image
-          quality, the camera device may choose to estimate the per frame black
-          level values either based on optically shielded black regions
-          (android.sensor.opticalBlackRegions) or its internal model.
-
-          This key reports the camera device estimated per-frame zero light
-          value for each of the CFA mosaic channels in the camera sensor. The
-          android.sensor.blackLevelPattern may only represent a coarse
-          approximation of the actual black level values. This value is the
-          black level used in camera device internal image processing pipeline
-          and generally more accurate than the fixed black level values.
-          However, since they are estimated values by the camera device, they
-          may not be as accurate as the black level values calculated from the
-          optical black pixels reported by android.sensor.opticalBlackRegions.
-
-          The values are given in the same order as channels listed for the CFA
-          layout key (see android.sensor.info.colorFilterArrangement), i.e. the
-          nth value given corresponds to the black level offset for the nth
-          color channel listed in the CFA.
-
-          This key will be available if android.sensor.opticalBlackRegions is
-          available or the camera device advertises this key via
-          {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
-          </details>
-          <hal_details>
-          The values are given in row-column scan order, with the first value
-          corresponding to the element of the CFA in row=0, column=0.
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-        <entry name="dynamicWhiteLevel" type="int32" visibility="public"
-        optional="true" >
-          <description>
-          Maximum raw value output by sensor for this frame.
-          </description>
-          <range> &amp;gt;= 0</range>
-          <details>
-          Since the android.sensor.blackLevelPattern may change for different
-          capture settings (e.g., android.sensor.sensitivity), the white
-          level will change accordingly. This key is similar to
-          android.sensor.info.whiteLevel, but specifies the camera device
-          estimated white level for each frame.
-
-          This key will be available if android.sensor.opticalBlackRegions is
-          available or the camera device advertises this key via
-          {@link ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}.
-          </details>
-          <hal_details>
-          The full bit depth of the sensor must be available in the raw data,
-          so the value for linear sensors should not be significantly lower
-          than maximum raw value supported, i.e. 2^(sensor bits per pixel).
-          </hal_details>
-          <tag id="RAW" />
-        </entry>
-      </dynamic>
-      <static>
-        <entry name="opaqueRawSize" type="int32" visibility="system" container="array">
-          <array>
-            <size>n</size>
-            <size>3</size>
-          </array>
-          <description>Size in bytes for all the listed opaque RAW buffer sizes</description>
-          <range>Must be large enough to fit the opaque RAW of corresponding size produced by
-          the camera</range>
-          <details>
-          These configurations are listed as `(width, height, size_in_bytes)` tuples.
-          This is used for sizing the gralloc buffers for opaque RAW buffers.
-          All RAW_OPAQUE output stream configuration listed in
-          android.scaler.availableStreamConfigurations will have a corresponding tuple in
-          this key.
-          </details>
-          <hal_details>
-              This key is added in HAL3.4.
-              For HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this key.
-              For HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, camera framework
-              will derive this key by assuming each pixel takes two bytes and no padding bytes
-              between rows.
-          </hal_details>
-        </entry>
-      </static>
-    </section>
-    <section name="shading">
-      <controls>
-        <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full">
-          <enum>
-            <value>OFF
-            <notes>No lens shading correction is applied.</notes></value>
-            <value>FAST
-            <notes>Apply lens shading corrections, without slowing
-            frame rate relative to sensor raw output</notes></value>
-            <value>HIGH_QUALITY
-            <notes>Apply high-quality lens shading correction, at the
-            cost of possibly reduced frame rate.</notes></value>
-          </enum>
-          <description>Quality of lens shading correction applied
-          to the image data.</description>
-          <range>android.shading.availableModes</range>
-          <details>
-          When set to OFF mode, no lens shading correction will be applied by the
-          camera device, and an identity lens shading map data will be provided
-          if `android.statistics.lensShadingMapMode == ON`. For example, for lens
-          shading map with size of `[ 4, 3 ]`,
-          the output android.statistics.lensShadingCorrectionMap for this case will be an identity
-          map shown below:
-
-              [ 1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
-               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
-               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
-               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
-               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0,
-               1.0, 1.0, 1.0, 1.0,  1.0, 1.0, 1.0, 1.0 ]
-
-          When set to other modes, lens shading correction will be applied by the camera
-          device. Applications can request lens shading map data by setting
-          android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens
-          shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map
-          data will be the one applied by the camera device for this capture request.
-
-          The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
-          the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
-          AWB are in AUTO modes(android.control.aeMode `!=` OFF and android.control.awbMode `!=`
-          OFF), to get best results, it is recommended that the applications wait for the AE and AWB
-          to be converged before using the returned shading map data.
-          </details>
-        </entry>
-        <entry name="strength" type="byte">
-          <description>Control the amount of shading correction
-          applied to the images</description>
-          <units>unitless: 1-10; 10 is full shading
-          compensation</units>
-          <tag id="FUTURE" />
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.shading.mode" kind="controls">
-        </clone>
-      </dynamic>
-      <static>
-        <entry name="availableModes" type="byte" visibility="public"
-            type_notes="List of enums (android.shading.mode)." container="array"
-            typedef="enumList" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of lens shading modes for android.shading.mode that are supported by this camera device.
-          </description>
-          <range>Any value listed in android.shading.mode</range>
-          <details>
-              This list contains lens shading modes that can be set for the camera device.
-              Camera devices that support the MANUAL_POST_PROCESSING capability will always
-              list OFF and FAST mode. This includes all FULL level devices.
-              LEGACY devices will always only support FAST mode.
-          </details>
-          <hal_details>
-            HAL must support both FAST and HIGH_QUALITY if lens shading correction control is
-            available on the camera device, but the underlying implementation can be the same for
-            both modes. That is, if the highest quality implementation on the camera device does not
-            slow down capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-        </entry>
-      </static>
-    </section>
-    <section name="statistics">
-      <controls>
-        <entry name="faceDetectMode" type="byte" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value>OFF
-            <notes>Do not include face detection statistics in capture
-            results.</notes></value>
-            <value optional="true">SIMPLE
-            <notes>Return face rectangle and confidence values only.
-            </notes></value>
-            <value optional="true">FULL
-            <notes>Return all face
-            metadata.
-
-            In this mode, face rectangles, scores, landmarks, and face IDs are all valid.
-            </notes></value>
-          </enum>
-          <description>Operating mode for the face detector
-          unit.</description>
-          <range>android.statistics.info.availableFaceDetectModes</range>
-          <details>Whether face detection is enabled, and whether it
-          should output just the basic fields or the full set of
-          fields.</details>
-          <hal_details>
-            SIMPLE mode must fill in android.statistics.faceRectangles and
-            android.statistics.faceScores.
-            FULL mode must also fill in android.statistics.faceIds, and
-            android.statistics.faceLandmarks.
-          </hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="histogramMode" type="byte" enum="true" typedef="boolean">
-          <enum>
-            <value>OFF</value>
-            <value>ON</value>
-          </enum>
-          <description>Operating mode for histogram
-          generation</description>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean">
-          <enum>
-            <value>OFF</value>
-            <value>ON</value>
-          </enum>
-          <description>Operating mode for sharpness map
-          generation</description>
-          <tag id="FUTURE" />
-        </entry>
-        <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true"
-        typedef="boolean">
-          <enum>
-            <value>OFF
-            <notes>Hot pixel map production is disabled.
-            </notes></value>
-            <value>ON
-            <notes>Hot pixel map production is enabled.
-            </notes></value>
-          </enum>
-          <description>
-          Operating mode for hot pixel map generation.
-          </description>
-          <range>android.statistics.info.availableHotPixelMapModes</range>
-          <details>
-          If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap.
-          If set to `false`, no hot pixel map will be returned.
-          </details>
-          <tag id="V1" />
-          <tag id="RAW" />
-        </entry>
-      </controls>
-      <static>
-        <namespace name="info">
-          <entry name="availableFaceDetectModes" type="byte"
-                 visibility="public"
-                 type_notes="List of enums from android.statistics.faceDetectMode"
-                 container="array"
-                 typedef="enumList"
-                 hwlevel="legacy">
-            <array>
-              <size>n</size>
-            </array>
-            <description>List of face detection modes for android.statistics.faceDetectMode that are
-            supported by this camera device.
-            </description>
-            <range>Any value listed in android.statistics.faceDetectMode</range>
-            <details>OFF is always supported.
-            </details>
-          </entry>
-          <entry name="histogramBucketCount" type="int32">
-            <description>Number of histogram buckets
-            supported</description>
-            <range>&amp;gt;= 64</range>
-            <tag id="FUTURE" />
-          </entry>
-          <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy">
-            <description>The maximum number of simultaneously detectable
-            faces.</description>
-            <range>0 for cameras without available face detection; otherwise:
-            `&gt;=4` for LIMITED or FULL hwlevel devices or
-            `&gt;0` for LEGACY devices.</range>
-            <tag id="BC" />
-          </entry>
-          <entry name="maxHistogramCount" type="int32">
-            <description>Maximum value possible for a histogram
-            bucket</description>
-            <tag id="FUTURE" />
-          </entry>
-          <entry name="maxSharpnessMapValue" type="int32">
-            <description>Maximum value possible for a sharpness map
-            region.</description>
-            <tag id="FUTURE" />
-          </entry>
-          <entry name="sharpnessMapSize" type="int32"
-          type_notes="width x height" container="array" typedef="size">
-            <array>
-              <size>2</size>
-            </array>
-            <description>Dimensions of the sharpness
-            map</description>
-            <range>Must be at least 32 x 32</range>
-            <tag id="FUTURE" />
-          </entry>
-          <entry name="availableHotPixelMapModes" type="byte" visibility="public"
-                 type_notes="list of enums" container="array" typedef="boolean">
-            <array>
-              <size>n</size>
-            </array>
-            <description>
-            List of hot pixel map output modes for android.statistics.hotPixelMapMode that are
-            supported by this camera device.
-            </description>
-            <range>Any value listed in android.statistics.hotPixelMapMode</range>
-            <details>
-            If no hotpixel map output is available for this camera device, this will contain only
-            `false`.
-
-            ON is always supported on devices with the RAW capability.
-            </details>
-            <tag id="V1" />
-            <tag id="RAW" />
-          </entry>
-          <entry name="availableLensShadingMapModes" type="byte" visibility="public"
-                 type_notes="list of enums" container="array" typedef="enumList">
-            <array>
-              <size>n</size>
-            </array>
-            <description>
-            List of lens shading map output modes for android.statistics.lensShadingMapMode that
-            are supported by this camera device.
-            </description>
-            <range>Any value listed in android.statistics.lensShadingMapMode</range>
-            <details>
-            If no lens shading map output is available for this camera device, this key will
-            contain only OFF.
-
-            ON is always supported on devices with the RAW capability.
-            LEGACY mode devices will always only support OFF.
-            </details>
-          </entry>
-        </namespace>
-      </static>
-      <dynamic>
-        <clone entry="android.statistics.faceDetectMode"
-               kind="controls"></clone>
-        <entry name="faceIds" type="int32" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>List of unique IDs for detected faces.</description>
-          <details>
-          Each detected face is given a unique ID that is valid for as long as the face is visible
-          to the camera device.  A face that leaves the field of view and later returns may be
-          assigned a new ID.
-
-          Only available if android.statistics.faceDetectMode == FULL</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="faceLandmarks" type="int32" visibility="ndk_public"
-               type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-            <size>6</size>
-          </array>
-          <description>List of landmarks for detected
-          faces.</description>
-          <details>
-            The coordinate system is that of android.sensor.info.activeArraySize, with
-            `(0, 0)` being the top-left pixel of the active array.
-
-            Only available if android.statistics.faceDetectMode == FULL</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="faceRectangles" type="int32" visibility="ndk_public"
-               type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area"
-               container="array" typedef="rectangle" hwlevel="legacy">
-          <array>
-            <size>n</size>
-            <size>4</size>
-          </array>
-          <description>List of the bounding rectangles for detected
-          faces.</description>
-          <details>
-            The data representation is int[4], which maps to (left, top, width, height).
-
-            The coordinate system is that of android.sensor.info.activeArraySize, with
-            `(0, 0)` being the top-left pixel of the active array.
-
-            Only available if android.statistics.faceDetectMode != OFF</details>
-          <tag id="BC" />
-        </entry>
-        <entry name="faceScores" type="byte" visibility="ndk_public"
-               container="array" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>List of the face confidence scores for
-          detected faces</description>
-          <range>1-100</range>
-          <details>Only available if android.statistics.faceDetectMode != OFF.
-          </details>
-          <hal_details>
-          The value should be meaningful (for example, setting 100 at
-          all times is illegal).</hal_details>
-          <tag id="BC" />
-        </entry>
-        <entry name="faces" type="int32" visibility="java_public" synthetic="true"
-               container="array" typedef="face" hwlevel="legacy">
-          <array>
-            <size>n</size>
-          </array>
-          <description>List of the faces detected through camera face detection
-          in this capture.</description>
-          <details>
-          Only available if android.statistics.faceDetectMode `!=` OFF.
-          </details>
-        </entry>
-        <entry name="histogram" type="int32"
-        type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount"
-        container="array">
-          <array>
-            <size>n</size>
-            <size>3</size>
-          </array>
-          <description>A 3-channel histogram based on the raw
-          sensor data</description>
-          <details>The k'th bucket (0-based) covers the input range
-          (with w = android.sensor.info.whiteLevel) of [ k * w/N,
-          (k + 1) * w / N ). If only a monochrome sharpness map is
-          supported, all channels should have the same data</details>
-          <tag id="FUTURE" />
-        </entry>
-        <clone entry="android.statistics.histogramMode"
-        kind="controls"></clone>
-        <entry name="sharpnessMap" type="int32"
-        type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)"
-        container="array">
-          <array>
-            <size>n</size>
-            <size>m</size>
-            <size>3</size>
-          </array>
-          <description>A 3-channel sharpness map, based on the raw
-          sensor data</description>
-          <details>If only a monochrome sharpness map is supported,
-          all channels should have the same data</details>
-          <tag id="FUTURE" />
-        </entry>
-        <clone entry="android.statistics.sharpnessMapMode"
-               kind="controls"></clone>
-        <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public"
-               typedef="lensShadingMap" hwlevel="full">
-          <description>The shading map is a low-resolution floating-point map
-          that lists the coefficients used to correct for vignetting, for each
-          Bayer color channel.</description>
-          <range>Each gain factor is &amp;gt;= 1</range>
-          <details>
-          The map provided here is the same map that is used by the camera device to
-          correct both color shading and vignetting for output non-RAW images.
-
-          When there is no lens shading correction applied to RAW
-          output images (android.sensor.info.lensShadingApplied `==`
-          false), this map is the complete lens shading correction
-          map; when there is some lens shading correction applied to
-          the RAW output image (android.sensor.info.lensShadingApplied
-          `==` true), this map reports the remaining lens shading
-          correction map that needs to be applied to get shading
-          corrected images that match the camera device's output for
-          non-RAW formats.
-
-          For a complete shading correction map, the least shaded
-          section of the image will have a gain factor of 1; all
-          other sections will have gains above 1.
-
-          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
-          will take into account the colorCorrection settings.
-
-          The shading map is for the entire active pixel array, and is not
-          affected by the crop region specified in the request. Each shading map
-          entry is the value of the shading compensation map over a specific
-          pixel on the sensor.  Specifically, with a (N x M) resolution shading
-          map, and an active pixel array size (W x H), shading map entry
-          (x,y) ∈ (0 ... N-1, 0 ... M-1) is the value of the shading map at
-          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
-          The map is assumed to be bilinearly interpolated between the sample points.
-
-          The channel order is [R, Geven, Godd, B], where Geven is the green
-          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-          The shading map is stored in a fully interleaved format.
-
-          The shading map will generally have on the order of 30-40 rows and columns,
-          and will be smaller than 64x64.
-
-          As an example, given a very small map defined as:
-
-              width,height = [ 4, 3 ]
-              values =
-              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-
-          The low-resolution scaling map images for each channel are
-          (displayed using nearest-neighbor interpolation):
-
-          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
-          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
-          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
-          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
-
-          As a visualization only, inverting the full-color map to recover an
-          image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:
-
-          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
-          </details>
-        </entry>
-        <entry name="lensShadingMap" type="float" visibility="ndk_public"
-               type_notes="2D array of float gain factors per channel to correct lens shading"
-               container="array" hwlevel="full">
-          <array>
-            <size>4</size>
-            <size>n</size>
-            <size>m</size>
-          </array>
-          <description>The shading map is a low-resolution floating-point map
-          that lists the coefficients used to correct for vignetting and color shading,
-          for each Bayer color channel of RAW image data.</description>
-          <range>Each gain factor is &amp;gt;= 1</range>
-          <details>
-          The map provided here is the same map that is used by the camera device to
-          correct both color shading and vignetting for output non-RAW images.
-
-          When there is no lens shading correction applied to RAW
-          output images (android.sensor.info.lensShadingApplied `==`
-          false), this map is the complete lens shading correction
-          map; when there is some lens shading correction applied to
-          the RAW output image (android.sensor.info.lensShadingApplied
-          `==` true), this map reports the remaining lens shading
-          correction map that needs to be applied to get shading
-          corrected images that match the camera device's output for
-          non-RAW formats.
-
-          For a complete shading correction map, the least shaded
-          section of the image will have a gain factor of 1; all
-          other sections will have gains above 1.
-
-          When android.colorCorrection.mode = TRANSFORM_MATRIX, the map
-          will take into account the colorCorrection settings.
-
-          The shading map is for the entire active pixel array, and is not
-          affected by the crop region specified in the request. Each shading map
-          entry is the value of the shading compensation map over a specific
-          pixel on the sensor.  Specifically, with a (N x M) resolution shading
-          map, and an active pixel array size (W x H), shading map entry
-          (x,y) ∈ (0 ... N-1, 0 ... M-1) is the value of the shading map at
-          pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
-          The map is assumed to be bilinearly interpolated between the sample points.
-
-          The channel order is [R, Geven, Godd, B], where Geven is the green
-          channel for the even rows of a Bayer pattern, and Godd is the odd rows.
-          The shading map is stored in a fully interleaved format, and its size
-          is provided in the camera static metadata by android.lens.info.shadingMapSize.
-
-          The shading map will generally have on the order of 30-40 rows and columns,
-          and will be smaller than 64x64.
-
-          As an example, given a very small map defined as:
-
-              android.lens.info.shadingMapSize = [ 4, 3 ]
-              android.statistics.lensShadingMap =
-              [ 1.3, 1.2, 1.15, 1.2,  1.2, 1.2, 1.15, 1.2,
-                  1.1, 1.2, 1.2, 1.2,  1.3, 1.2, 1.3, 1.3,
-                1.2, 1.2, 1.25, 1.1,  1.1, 1.1, 1.1, 1.0,
-                  1.0, 1.0, 1.0, 1.0,  1.2, 1.3, 1.25, 1.2,
-                1.3, 1.2, 1.2, 1.3,   1.2, 1.15, 1.1, 1.2,
-                  1.2, 1.1, 1.0, 1.2,  1.3, 1.15, 1.2, 1.3 ]
-
-          The low-resolution scaling map images for each channel are
-          (displayed using nearest-neighbor interpolation):
-
-          ![Red lens shading map](android.statistics.lensShadingMap/red_shading.png)
-          ![Green (even rows) lens shading map](android.statistics.lensShadingMap/green_e_shading.png)
-          ![Green (odd rows) lens shading map](android.statistics.lensShadingMap/green_o_shading.png)
-          ![Blue lens shading map](android.statistics.lensShadingMap/blue_shading.png)
-
-          As a visualization only, inverting the full-color map to recover an
-          image of a gray wall (using bicubic interpolation for visual quality)
-          as captured by the sensor gives:
-
-          ![Image of a uniform white wall (inverse shading map)](android.statistics.lensShadingMap/inv_shading.png)
-
-          Note that the RAW image data might be subject to lens shading
-          correction not reported on this map. Query
-          android.sensor.info.lensShadingApplied to see if RAW image data has been subject
-          to lens shading correction. If android.sensor.info.lensShadingApplied
-          is TRUE, the RAW image data is subject to partial or full lens shading
-          correction. In the case full lens shading correction is applied to RAW
-          images, the gain factor map reported in this key will contain all 1.0 gains.
-          In other words, the map reported in this key is the remaining lens shading
-          that needs to be applied on the RAW image to get images without lens shading
-          artifacts. See android.request.maxNumOutputRaw for a list of RAW image
-          formats.
-          </details>
-          <hal_details>
-          The lens shading map calculation may depend on exposure and white balance statistics.
-          When AE and AWB are in AUTO modes
-          (android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
-          may have all the information it needs to generate the most accurate lens shading map. When
-          AE or AWB are in manual mode
-          (android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
-          may be adversely impacted by manual exposure or white balance parameters. To avoid
-          generating unreliable shading map data, the HAL may choose to lock the shading map with
-          the latest known good map generated when the AE and AWB are in AUTO modes.
-          </hal_details>
-        </entry>
-        <entry name="predictedColorGains" type="float"
-               visibility="hidden"
-               deprecated="true"
-               optional="true"
-               type_notes="A 1D array of floats for 4 color channel gains"
-               container="array">
-          <array>
-            <size>4</size>
-          </array>
-          <description>The best-fit color channel gains calculated
-          by the camera device's statistics units for the current output frame.
-          </description>
-          <details>
-          This may be different than the gains used for this frame,
-          since statistics processing on data from a new frame
-          typically completes after the transform has already been
-          applied to that frame.
-
-          The 4 channel gains are defined in Bayer domain,
-          see android.colorCorrection.gains for details.
-
-          This value should always be calculated by the auto-white balance (AWB) block,
-          regardless of the android.control.* current values.
-          </details>
-        </entry>
-        <entry name="predictedColorTransform" type="rational"
-               visibility="hidden"
-               deprecated="true"
-               optional="true"
-               type_notes="3x3 rational matrix in row-major order"
-               container="array">
-          <array>
-            <size>3</size>
-            <size>3</size>
-          </array>
-          <description>The best-fit color transform matrix estimate
-          calculated by the camera device's statistics units for the current
-          output frame.</description>
-          <details>The camera device will provide the estimate from its
-          statistics unit on the white balance transforms to use
-          for the next frame. These are the values the camera device believes
-          are the best fit for the current output frame. This may
-          be different than the transform used for this frame, since
-          statistics processing on data from a new frame typically
-          completes after the transform has already been applied to
-          that frame.
-
-          These estimates must be provided for all frames, even if
-          capture settings and color transforms are set by the application.
-
-          This value should always be calculated by the auto-white balance (AWB) block,
-          regardless of the android.control.* current values.
-          </details>
-        </entry>
-        <entry name="sceneFlicker" type="byte" visibility="public" enum="true"
-               hwlevel="full">
-          <enum>
-            <value>NONE
-            <notes>The camera device does not detect any flickering illumination
-            in the current scene.</notes></value>
-            <value>50HZ
-            <notes>The camera device detects illumination flickering at 50Hz
-            in the current scene.</notes></value>
-            <value>60HZ
-            <notes>The camera device detects illumination flickering at 60Hz
-            in the current scene.</notes></value>
-          </enum>
-          <description>The camera device estimated scene illumination lighting
-          frequency.</description>
-          <details>
-          Many light sources, such as most fluorescent lights, flicker at a rate
-          that depends on the local utility power standards. This flicker must be
-          accounted for by auto-exposure routines to avoid artifacts in captured images.
-          The camera device uses this entry to tell the application what the scene
-          illuminant frequency is.
-
-          When manual exposure control is enabled
-          (`android.control.aeMode == OFF` or `android.control.mode ==
-          OFF`), the android.control.aeAntibandingMode doesn't perform
-          antibanding, and the application can ensure it selects
-          exposure times that do not cause banding issues by looking
-          into this metadata field. See
-          android.control.aeAntibandingMode for more details.
-
-          Reports NONE if there doesn't appear to be flickering illumination.
-          </details>
-        </entry>
-        <clone entry="android.statistics.hotPixelMapMode" kind="controls">
-        </clone>
-        <entry name="hotPixelMap" type="int32" visibility="public"
-        type_notes="list of coordinates based on android.sensor.pixelArraySize"
-        container="array" typedef="point">
-          <array>
-            <size>2</size>
-            <size>n</size>
-          </array>
-          <description>
-          List of `(x, y)` coordinates of hot/defective pixels on the sensor.
-          </description>
-          <range>
-          n &lt;= number of pixels on the sensor.
-          The `(x, y)` coordinates must be bounded by
-          android.sensor.info.pixelArraySize.
-          </range>
-          <details>
-          A coordinate `(x, y)` must lie between `(0, 0)`, and
-          `(width - 1, height - 1)` (inclusive), which are the top-left and
-          bottom-right of the pixel array, respectively. The width and
-          height dimensions are given in android.sensor.info.pixelArraySize.
-          This may include hot pixels that lie outside of the active array
-          bounds given by android.sensor.info.activeArraySize.
-          </details>
-          <hal_details>
-          A hotpixel map contains the coordinates of pixels on the camera
-          sensor that do not report valid values (usually due to defects in
-          the camera sensor). This includes pixels that are stuck at certain
-          values, or have a response that does not accurately encode the
-          incoming light from the scene.
-
-          To avoid performance issues, there should be significantly fewer hot
-          pixels than actual pixels on the camera sensor.
-          </hal_details>
-          <tag id="V1" />
-          <tag id="RAW" />
-        </entry>
-      </dynamic>
-      <controls>
-        <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full">
-          <enum>
-            <value>OFF
-            <notes>Do not include a lens shading map in the capture result.</notes></value>
-            <value>ON
-            <notes>Include a lens shading map in the capture result.</notes></value>
-          </enum>
-          <description>Whether the camera device will output the lens
-          shading map in output result metadata.</description>
-          <range>android.statistics.info.availableLensShadingMapModes</range>
-          <details>When set to ON,
-          android.statistics.lensShadingMap will be provided in
-          the output result metadata.
-
-          ON is always supported on devices with the RAW capability.
-          </details>
-          <tag id="RAW" />
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.statistics.lensShadingMapMode" kind="controls">
-        </clone>
-      </dynamic>
-    </section>
-    <section name="tonemap">
-      <controls>
-        <entry name="curveBlue" type="float" visibility="ndk_public"
-        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
-        container="array" hwlevel="full">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>Tonemapping / contrast / gamma curve for the blue
-          channel, to use when android.tonemap.mode is
-          CONTRAST_CURVE.</description>
-          <details>See android.tonemap.curveRed for more details.</details>
-        </entry>
-        <entry name="curveGreen" type="float" visibility="ndk_public"
-        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
-        container="array" hwlevel="full">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>Tonemapping / contrast / gamma curve for the green
-          channel, to use when android.tonemap.mode is
-          CONTRAST_CURVE.</description>
-          <details>See android.tonemap.curveRed for more details.</details>
-        </entry>
-        <entry name="curveRed" type="float" visibility="ndk_public"
-        type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints."
-        container="array" hwlevel="full">
-          <array>
-            <size>n</size>
-            <size>2</size>
-          </array>
-          <description>Tonemapping / contrast / gamma curve for the red
-          channel, to use when android.tonemap.mode is
-          CONTRAST_CURVE.</description>
-          <range>0-1 on both input and output coordinates, normalized
-          as a floating-point value such that 0 == black and 1 == white.
-          </range>
-          <details>
-          Each channel's curve is defined by an array of control points:
-
-              android.tonemap.curveRed =
-                [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
-              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
-
-          These are sorted in order of increasing `Pin`; it is
-          required that input values 0.0 and 1.0 are included in the list to
-          define a complete mapping. For input values between control points,
-          the camera device must linearly interpolate between the control
-          points.
-
-          Each curve can have an independent number of points, and the number
-          of points can be less than max (that is, the request doesn't have to
-          always provide a curve with number of points equivalent to
-          android.tonemap.maxCurvePoints).
-
-          A few examples, and their corresponding graphical mappings; these
-          only specify the red channel and the precision is limited to 4
-          digits, for conciseness.
-
-          Linear mapping:
-
-              android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
-
-          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
-
-          Invert mapping:
-
-              android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
-
-          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
-
-          Gamma 1/2.2 mapping, with 16 control points:
-
-              android.tonemap.curveRed = [
-                0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
-                0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
-                0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
-                0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
-
-          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
-
-          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
-
-              android.tonemap.curveRed = [
-                0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
-                0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
-                0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
-                0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
-
-          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
-        </details>
-        <hal_details>
-          For good quality of mapping, at least 128 control points are
-          preferred.
-
-          A typical use case of this would be a gamma-1/2.2 curve, with as many
-          control points used as are available.
-        </hal_details>
-        </entry>
-        <entry name="curve" type="float" visibility="java_public" synthetic="true"
-               typedef="tonemapCurve"
-               hwlevel="full">
-          <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode
-          is CONTRAST_CURVE.</description>
-          <details>
-          The tonemapCurve consists of three curves, one for each of the red, green, and blue
-          channels respectively. The following example uses the red channel as an
-          example. The same logic applies to the green and blue channels.
-          Each channel's curve is defined by an array of control points:
-
-              curveRed =
-                [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
-              2 &lt;= N &lt;= android.tonemap.maxCurvePoints
-
-          These are sorted in order of increasing `Pin`; it is always
-          guaranteed that input values 0.0 and 1.0 are included in the list to
-          define a complete mapping. For input values between control points,
-          the camera device must linearly interpolate between the control
-          points.
-
-          Each curve can have an independent number of points, and the number
-          of points can be less than max (that is, the request doesn't have to
-          always provide a curve with number of points equivalent to
-          android.tonemap.maxCurvePoints).
-
-          A few examples, and their corresponding graphical mappings; these
-          only specify the red channel and the precision is limited to 4
-          digits, for conciseness.
-
-          Linear mapping:
-
-              curveRed = [ (0, 0), (1.0, 1.0) ]
-
-          ![Linear mapping curve](android.tonemap.curveRed/linear_tonemap.png)
-
-          Invert mapping:
-
-              curveRed = [ (0, 1.0), (1.0, 0) ]
-
-          ![Inverting mapping curve](android.tonemap.curveRed/inverse_tonemap.png)
-
-          Gamma 1/2.2 mapping, with 16 control points:
-
-              curveRed = [
-                (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
-                (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
-                (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
-                (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
-
-          ![Gamma = 1/2.2 tonemapping curve](android.tonemap.curveRed/gamma_tonemap.png)
-
-          Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:
-
-              curveRed = [
-                (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
-                (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
-                (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
-                (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
-
-          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
-        </details>
-        <hal_details>
-            This entry is created by the framework from the curveRed, curveGreen and
-            curveBlue entries.
-        </hal_details>
-        </entry>
-        <entry name="mode" type="byte" visibility="public" enum="true"
-               hwlevel="full">
-          <enum>
-            <value>CONTRAST_CURVE
-              <notes>Use the tone mapping curve specified in
-              the android.tonemap.curve* entries.
-
-              All color enhancement and tonemapping must be disabled, except
-              for applying the tonemapping curve specified by
-              android.tonemap.curve.
-
-              Must not slow down frame rate relative to raw
-              sensor output.
-              </notes>
-            </value>
-            <value>FAST
-              <notes>
-              Advanced gamma mapping and color enhancement may be applied, without
-              reducing frame rate compared to raw sensor output.
-              </notes>
-            </value>
-            <value>HIGH_QUALITY
-              <notes>
-              High-quality gamma mapping and color enhancement will be applied, at
-              the cost of possibly reduced frame rate compared to raw sensor output.
-              </notes>
-            </value>
-            <value>GAMMA_VALUE
-              <notes>
-              Use the gamma value specified in android.tonemap.gamma to perform
-              tonemapping.
-
-              All color enhancement and tonemapping must be disabled, except
-              for applying the tonemapping curve specified by android.tonemap.gamma.
-
-              Must not slow down frame rate relative to raw sensor output.
-              </notes>
-            </value>
-            <value>PRESET_CURVE
-              <notes>
-              Use the preset tonemapping curve specified in
-              android.tonemap.presetCurve to perform tonemapping.
-
-              All color enhancement and tonemapping must be disabled, except
-              for applying the tonemapping curve specified by
-              android.tonemap.presetCurve.
-
-              Must not slow down frame rate relative to raw sensor output.
-              </notes>
-            </value>
-          </enum>
-          <description>High-level global contrast/gamma/tonemapping control.
-          </description>
-          <range>android.tonemap.availableToneMapModes</range>
-          <details>
-          When switching to an application-defined contrast curve by setting
-          android.tonemap.mode to CONTRAST_CURVE, the curve is defined
-          per-channel with a set of `(in, out)` points that specify the
-          mapping from input high-bit-depth pixel value to the output
-          low-bit-depth value.  Since the actual pixel ranges of both input
-          and output may change depending on the camera pipeline, the values
-          are specified by normalized floating-point numbers.
-
-          More-complex color mapping operations such as 3D color look-up
-          tables, selective chroma enhancement, or other non-linear color
-          transforms will be disabled when android.tonemap.mode is
-          CONTRAST_CURVE.
-
-          When using either FAST or HIGH_QUALITY, the camera device will
-          emit its own tonemap curve in android.tonemap.curve.
-          These values are always available, and as close as possible to the
-          actually used nonlinear/nonglobal transforms.
-
-          If a request is sent with CONTRAST_CURVE with the camera device's
-          provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
-          roughly the same.</details>
-        </entry>
-      </controls>
-      <static>
-        <entry name="maxCurvePoints" type="int32" visibility="public"
-               hwlevel="full">
-          <description>Maximum number of supported points in the
-            tonemap curve that can be used for android.tonemap.curve.
-          </description>
-          <details>
-          If the actual number of points provided by the application (in android.tonemap.curve*) is
-          less than this maximum, the camera device will resample the curve to its internal
-          representation, using linear interpolation.
-
-          The output curves in the result metadata may have a different number
-          of points than the input curves, and will represent the actual
-          hardware curves used as closely as possible when linearly interpolated.
-          </details>
-          <hal_details>
-          This value must be at least 64. This should be at least 128.
-          </hal_details>
-        </entry>
-        <entry name="availableToneMapModes" type="byte" visibility="public"
-        type_notes="list of enums" container="array" typedef="enumList" hwlevel="full">
-          <array>
-            <size>n</size>
-          </array>
-          <description>
-          List of tonemapping modes for android.tonemap.mode that are supported by this camera
-          device.
-          </description>
-          <range>Any value listed in android.tonemap.mode</range>
-          <details>
-          Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
-          at least one of below mode combinations:
-
-          * CONTRAST_CURVE, FAST and HIGH_QUALITY
-          * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY
-
-          This includes all FULL level devices.
-          </details>
-          <hal_details>
-            HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available
-            on the camera device, but the underlying implementation can be the same for both modes.
-            That is, if the highest quality implementation on the camera device does not slow down
-            capture rate, then FAST and HIGH_QUALITY will generate the same output.
-          </hal_details>
-        </entry>
-      </static>
-      <dynamic>
-        <clone entry="android.tonemap.curveBlue" kind="controls">
-        </clone>
-        <clone entry="android.tonemap.curveGreen" kind="controls">
-        </clone>
-        <clone entry="android.tonemap.curveRed" kind="controls">
-        </clone>
-        <clone entry="android.tonemap.curve" kind="controls">
-        </clone>
-        <clone entry="android.tonemap.mode" kind="controls">
-        </clone>
-      </dynamic>
-      <controls>
-        <entry name="gamma" type="float" visibility="public">
-          <description> Tonemapping curve to use when android.tonemap.mode is
-          GAMMA_VALUE
-          </description>
-          <details>
-          The tonemap curve will be defined by the following formula:
-          * OUT = pow(IN, 1.0 / gamma)
-          where IN and OUT is the input pixel value scaled to range [0.0, 1.0],
-          pow is the power function and gamma is the gamma value specified by this
-          key.
-
-          The same curve will be applied to all color channels. The camera device
-          may clip the input gamma value to its supported range. The actual applied
-          value will be returned in capture result.
-
-          The valid range of gamma value varies on different devices, but values
-          within [1.0, 5.0] are guaranteed not to be clipped.
-          </details>
-        </entry>
-        <entry name="presetCurve" type="byte" visibility="public" enum="true">
-          <enum>
-            <value>SRGB
-              <notes>Tonemapping curve is defined by sRGB</notes>
-            </value>
-            <value>REC709
-              <notes>Tonemapping curve is defined by ITU-R BT.709</notes>
-            </value>
-          </enum>
-          <description> Tonemapping curve to use when android.tonemap.mode is
-          PRESET_CURVE
-          </description>
-          <details>
-          The tonemap curve will be defined by specified standard.
-
-          sRGB (approximated by 16 control points):
-
-          ![sRGB tonemapping curve](android.tonemap.curveRed/srgb_tonemap.png)
-
-          Rec. 709 (approximated by 16 control points):
-
-          ![Rec. 709 tonemapping curve](android.tonemap.curveRed/rec709_tonemap.png)
-
-          Note that above figures show a 16 control points approximation of preset
-          curves. Camera devices may apply a different approximation to the curve.
-          </details>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.tonemap.gamma" kind="controls">
-        </clone>
-        <clone entry="android.tonemap.presetCurve" kind="controls">
-        </clone>
-      </dynamic>
-    </section>
-    <section name="led">
-      <controls>
-        <entry name="transmit" type="byte" visibility="hidden" optional="true"
-               enum="true" typedef="boolean">
-          <enum>
-            <value>OFF</value>
-            <value>ON</value>
-          </enum>
-          <description>This LED is nominally used to indicate to the user
-          that the camera is powered on and may be streaming images back to the
-          Application Processor. In certain rare circumstances, the OS may
-          disable this when video is processed locally and not transmitted to
-          any untrusted applications.
-
-          In particular, the LED *must* always be on when the data could be
-          transmitted off the device. The LED *should* always be on whenever
-          data is stored locally on the device.
-
-          The LED *may* be off if a trusted application is using the data that
-          doesn't violate the above rules.
-          </description>
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.led.transmit" kind="controls"></clone>
-      </dynamic>
-      <static>
-        <entry name="availableLeds" type="byte" visibility="hidden" optional="true"
-               enum="true"
-               container="array">
-          <array>
-            <size>n</size>
-          </array>
-          <enum>
-            <value>TRANSMIT
-              <notes>android.led.transmit control is used.</notes>
-            </value>
-          </enum>
-          <description>A list of camera LEDs that are available on this system.
-          </description>
-        </entry>
-      </static>
-    </section>
-    <section name="info">
-      <static>
-        <entry name="supportedHardwareLevel" type="byte" visibility="public"
-               enum="true" hwlevel="legacy">
-          <enum>
-            <value>
-              LIMITED
-              <notes>
-              This camera device does not have enough capabilities to qualify as a `FULL` device or
-              better.
-
-              Only the stream configurations listed in the `LEGACY` and `LIMITED` tables in the
-              {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
-
-              All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic
-              support for color image capture. The only exception is that the device may
-              alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth
-              measurements and not color images.
-
-              `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger
-              to lock exposure metering (and calculate flash power, for cameras with flash) before
-              capturing a high-quality still image.
-
-              A `LIMITED` device that only lists the `BACKWARDS_COMPATIBLE` capability is only
-              required to support full-automatic operation and post-processing (`OFF` is not
-              supported for android.control.aeMode, android.control.afMode, or
-              android.control.awbMode)
-
-              Additional capabilities may optionally be supported by a `LIMITED`-level device, and
-              can be checked for in android.request.availableCapabilities.
-              </notes>
-            </value>
-            <value>
-              FULL
-              <notes>
-              This camera device is capable of supporting advanced imaging applications.
-
-              The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` tables in the
-              {@link ACameraDevice_createCaptureSession} documentation are guaranteed to be supported.
-
-              A `FULL` device will support below capabilities:
-
-              * `BURST_CAPTURE` capability (android.request.availableCapabilities contains
-                `BURST_CAPTURE`)
-              * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL)
-              * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`)
-              * Manual post-processing control (android.request.availableCapabilities contains
-                `MANUAL_POST_PROCESSING`)
-              * The required exposure time range defined in android.sensor.info.exposureTimeRange
-              * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration
-
-              Note:
-              Pre-API level 23, FULL devices also supported arbitrary cropping region
-              (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level
-              23, and `FULL` devices may only support `CENTERED` cropping.
-              </notes>
-            </value>
-            <value>
-              LEGACY
-              <notes>
-              This camera device is running in backward compatibility mode.
-
-              Only the stream configurations listed in the `LEGACY` table in the {@link
-              ACameraDevice_createCaptureSession} documentation are supported.
-
-              A `LEGACY` device does not support per-frame control, manual sensor control, manual
-              post-processing, arbitrary cropping regions, and has relaxed performance constraints.
-              No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a
-              `LEGACY` device in android.request.availableCapabilities.
-
-              In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY`
-              devices. Instead, every request that includes a JPEG-format output target is treated
-              as triggering a still capture, internally executing a precapture trigger.  This may
-              fire the flash for flash power metering during precapture, and then fire the flash
-              for the final capture, if a flash is available on the device and the AE mode is set to
-              enable the flash.
-              </notes>
-            </value>
-            <value>
-              3
-              <notes>
-              This camera device is capable of YUV reprocessing and RAW data capture, in addition to
-              FULL-level capabilities.
-
-              The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and
-              `LIMITED` tables in the {@link
-              ACameraDevice_createCaptureSession}
-              documentation are guaranteed to be supported.
-
-              The following additional capabilities are guaranteed to be supported:
-
-              * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains
-                `YUV_REPROCESSING`)
-              * `RAW` capability (android.request.availableCapabilities contains
-                `RAW`)
-              </notes>
-            </value>
-          </enum>
-          <description>
-          Generally classifies the overall set of the camera device functionality.
-          </description>
-          <details>
-          The supported hardware level is a high-level description of the camera device's
-          capabilities, summarizing several capabilities into one field.  Each level adds additional
-          features to the previous one, and is always a strict superset of the previous level.
-          The ordering is `LEGACY &lt; LIMITED &lt; FULL &lt; LEVEL_3`.
-
-          Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing
-          numerical value as well. To check if a given device is at least at a given hardware level,
-          the following code snippet can be used:
-
-              // Returns true if the device supports the required hardware level, or better.
-              boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
-                  int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-                  if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-                      return requiredLevel == deviceLevel;
-                  }
-                  // deviceLevel is not LEGACY, can use numerical sort
-                  return requiredLevel &lt;= deviceLevel;
-              }
-
-          At a high level, the levels are:
-
-          * `LEGACY` devices operate in a backwards-compatibility mode for older
-            Android devices, and have very limited capabilities.
-          * `LIMITED` devices represent the
-            baseline feature set, and may also include additional capabilities that are
-            subsets of `FULL`.
-          * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and
-            post-processing settings, and image capture at a high rate.
-          * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along
-            with additional output stream configurations.
-
-          See the individual level enums for full descriptions of the supported capabilities.  The
-          android.request.availableCapabilities entry describes the device's capabilities at a
-          finer-grain level, if needed. In addition, many controls have their available settings or
-          ranges defined in individual metadata tag entries in this document.
-
-          Some features are not part of any particular hardware level or capability and must be
-          queried separately. These include:
-
-          * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME)
-          * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED)
-          * Face detection (android.statistics.info.availableFaceDetectModes)
-          * Optical or electrical image stabilization
-            (android.lens.info.availableOpticalStabilization,
-             android.control.availableVideoStabilizationModes)
-
-          </details>
-          <hal_details>
-          The camera 3 HAL device can implement one of three possible operational modes; LIMITED,
-          FULL, and LEVEL_3.
-
-          FULL support or better is expected from new higher-end devices. Limited
-          mode has hardware requirements roughly in line with those for a camera HAL device v1
-          implementation, and is expected from older or inexpensive devices. Each level is a strict
-          superset of the previous level, and they share the same essential operational flow.
-
-          For full details refer to "S3. Operational Modes" in camera3.h
-
-          Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in
-          the `android.hardware.camera2` user-facing API only on HALv1 devices, and is implemented
-          by the camera framework code.
-          </hal_details>
-        </entry>
-      </static>
-    </section>
-    <section name="blackLevel">
-      <controls>
-        <entry name="lock" type="byte" visibility="public" enum="true"
-               typedef="boolean" hwlevel="full">
-          <enum>
-            <value>OFF</value>
-            <value>ON</value>
-          </enum>
-          <description> Whether black-level compensation is locked
-          to its current values, or is free to vary.</description>
-          <details>When set to `true` (ON), the values used for black-level
-          compensation will not change until the lock is set to
-          `false` (OFF).
-
-          Since changes to certain capture parameters (such as
-          exposure time) may require resetting of black level
-          compensation, the camera device must report whether setting
-          the black level lock was successful in the output result
-          metadata.
-
-          For example, if a sequence of requests is as follows:
-
-          * Request 1: Exposure = 10ms, Black level lock = OFF
-          * Request 2: Exposure = 10ms, Black level lock = ON
-          * Request 3: Exposure = 10ms, Black level lock = ON
-          * Request 4: Exposure = 20ms, Black level lock = ON
-          * Request 5: Exposure = 20ms, Black level lock = ON
-          * Request 6: Exposure = 20ms, Black level lock = ON
-
-          And the exposure change in Request 4 requires the camera
-          device to reset the black level offsets, then the output
-          result metadata is expected to be:
-
-          * Result 1: Exposure = 10ms, Black level lock = OFF
-          * Result 2: Exposure = 10ms, Black level lock = ON
-          * Result 3: Exposure = 10ms, Black level lock = ON
-          * Result 4: Exposure = 20ms, Black level lock = OFF
-          * Result 5: Exposure = 20ms, Black level lock = ON
-          * Result 6: Exposure = 20ms, Black level lock = ON
-
-          This indicates to the application that on frame 4, black
-          levels were reset due to exposure value changes, and pixel
-          values may not be consistent across captures.
-
-          The camera device will maintain the lock to the extent
-          possible, only overriding the lock to OFF when changes to
-          other request parameters require a black level recalculation
-          or reset.
-          </details>
-          <hal_details>
-          If for some reason black level locking is no longer possible
-          (for example, the analog gain has changed, which forces
-          black level offsets to be recalculated), then the HAL must
-          override this request (and it must report 'OFF' when this
-          does happen) until the next capture for which locking is
-          possible again.</hal_details>
-          <tag id="HAL2" />
-        </entry>
-      </controls>
-      <dynamic>
-        <clone entry="android.blackLevel.lock"
-          kind="controls">
-          <details>
-            Whether the black level offset was locked for this frame.  Should be
-            ON if android.blackLevel.lock was ON in the capture request, unless
-            a change in other capture settings forced the camera device to
-            perform a black level reset.
-          </details>
-        </clone>
-      </dynamic>
-    </section>
-    <section name="sync">
-      <dynamic>
-        <entry name="frameNumber" type="int64" visibility="ndk_public"
-               enum="true" hwlevel="legacy">
-          <enum>
-            <value id="-1">CONVERGING
-              <notes>
-              The current result is not yet fully synchronized to any request.
-
-              Synchronization is in progress, and reading metadata from this
-              result may include a mix of data that have taken effect since the
-              last synchronization time.
-
-              In some future result, within android.sync.maxLatency frames,
-              this value will update to the actual frame number
-              the result is guaranteed to be synchronized to (as long as the
-              request settings remain constant).
-            </notes>
-            </value>
-            <value id="-2">UNKNOWN
-              <notes>
-              The current result's synchronization status is unknown.
-
-              The result may have already converged, or it may be in
-              progress.  Reading from this result may include some mix
-              of settings from past requests.
-
-              After a settings change, the new settings will eventually all
-              take effect for the output buffers and results. However, this
-              value will not change when that happens. Altering settings
-              rapidly may provide outcomes using mixes of settings from recent
-              requests.
-
-              This value is intended primarily for backwards compatibility with
-              the older camera implementations (for android.hardware.Camera).
-            </notes>
-            </value>
-          </enum>
-          <description>The frame number corresponding to the last request
-          with which the output result (metadata + buffers) has been fully
-          synchronized.</description>
-          <range>Either a non-negative value corresponding to a
-          `frame_number`, or one of the two enums (CONVERGING / UNKNOWN).
-          </range>
-          <details>
-          When a request is submitted to the camera device, there is usually a
-          delay of several frames before the controls get applied. A camera
-          device may either choose to account for this delay by implementing a
-          pipeline and carefully submit well-timed atomic control updates, or
-          it may start streaming control changes that span over several frame
-          boundaries.
-
-          In the latter case, whenever a request's settings change relative to
-          the previous submitted request, the full set of changes may take
-          multiple frame durations to fully take effect. Some settings may
-          take effect sooner (in less frame durations) than others.
-
-          While a set of control changes are being propagated, this value
-          will be CONVERGING.
-
-          Once it is fully known that a set of control changes have been
-          finished propagating, and the resulting updated control settings
-          have been read back by the camera device, this value will be set
-          to a non-negative frame number (corresponding to the request to
-          which the results have synchronized to).
-
-          Older camera device implementations may not have a way to detect
-          when all camera controls have been applied, and will always set this
-          value to UNKNOWN.
-
-          FULL capability devices will always have this value set to the
-          frame number of the request corresponding to this result.
-
-          _Further details_:
-
-          * Whenever a request differs from the last request, any future
-          results not yet returned may have this value set to CONVERGING (this
-          could include any in-progress captures not yet returned by the camera
-          device, for more details see pipeline considerations below).
-          * Submitting a series of multiple requests that differ from the
-          previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
-          moves the new synchronization frame to the last non-repeating
-          request (using the smallest frame number from the contiguous list of
-          repeating requests).
-          * Submitting the same request repeatedly will not change this value
-          to CONVERGING, if it was already a non-negative value.
-          * When this value changes to non-negative, that means that all of the
-          metadata controls from the request have been applied, all of the
-          metadata controls from the camera device have been read to the
-          updated values (into the result), and all of the graphics buffers
-          corresponding to this result are also synchronized to the request.
-
-          _Pipeline considerations_:
-
-          Submitting a request with updated controls relative to the previously
-          submitted requests may also invalidate the synchronization state
-          of all the results corresponding to currently in-flight requests.
-
-          In other words, results for this current request and up to
-          android.request.pipelineMaxDepth prior requests may have their
-          android.sync.frameNumber change to CONVERGING.
-          </details>
-          <hal_details>
-          Using UNKNOWN here is illegal unless android.sync.maxLatency
-          is also UNKNOWN.
-
-          FULL capability devices should simply set this value to the
-          `frame_number` of the request this result corresponds to.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-      </dynamic>
-      <static>
-        <entry name="maxLatency" type="int32" visibility="public" enum="true"
-               hwlevel="legacy">
-          <enum>
-            <value id="0">PER_FRAME_CONTROL
-              <notes>
-              Every frame has the requests immediately applied.
-
-              Changing controls over multiple requests one after another will
-              produce results that have those controls applied atomically
-              each frame.
-
-              All FULL capability devices will have this as their maxLatency.
-              </notes>
-            </value>
-            <value id="-1">UNKNOWN
-              <notes>
-              Each new frame has some subset (potentially the entire set)
-              of the past requests applied to the camera settings.
-
-              By submitting a series of identical requests, the camera device
-              will eventually have the camera settings applied, but it is
-              unknown when that exact point will be.
-
-              All LEGACY capability devices will have this as their maxLatency.
-              </notes>
-            </value>
-          </enum>
-          <description>
-          The maximum number of frames that can occur after a request
-          (different than the previous) has been submitted, and before the
-          result's state becomes synchronized.
-          </description>
-          <units>Frame counts</units>
-          <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range>
-          <details>
-          This defines the maximum distance (in number of metadata results),
-          between the frame number of the request that has new controls to apply
-          and the frame number of the result that has all the controls applied.
-
-          In other words this acts as an upper boundary for how many frames
-          must occur before the camera device knows for a fact that the new
-          submitted camera settings have been applied in outgoing frames.
-          </details>
-          <hal_details>
-          For example if maxLatency was 2,
-
-              initial request = X (repeating)
-              request1 = X
-              request2 = Y
-              request3 = Y
-              request4 = Y
-
-              where requestN has frameNumber N, and the first of the repeating
-              initial request's has frameNumber F (and F &lt; 1).
-
-              initial result = X' + { android.sync.frameNumber == F }
-              result1 = X' + { android.sync.frameNumber == F }
-              result2 = X' + { android.sync.frameNumber == CONVERGING }
-              result3 = X' + { android.sync.frameNumber == CONVERGING }
-              result4 = X' + { android.sync.frameNumber == 2 }
-
-              where resultN has frameNumber N.
-
-          Since `result4` has a `frameNumber == 4` and
-          `android.sync.frameNumber == 2`, the distance is clearly
-          `4 - 2 = 2`.
-
-          Use `frame_count` from camera3_request_t instead of
-          android.request.frameCount or
-          `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`.
-
-          LIMITED devices are strongly encouraged to use a non-negative
-          value. If UNKNOWN is used here then app developers do not have a way
-          to know when sensor settings have been applied.
-          </hal_details>
-          <tag id="V1" />
-        </entry>
-      </static>
-    </section>
-    <section name="reprocess">
-      <controls>
-        <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited">
-            <description>
-            The amount of exposure time increase factor applied to the original output
-            frame by the application processing before sending for reprocessing.
-            </description>
-            <units>Relative exposure time increase factor.</units>
-            <range> &amp;gt;= 1.0</range>
-            <details>
-            This is optional, and will be supported if the camera device supports YUV_REPROCESSING
-            capability (android.request.availableCapabilities contains YUV_REPROCESSING).
-
-            For some YUV reprocessing use cases, the application may choose to filter the original
-            output frames to effectively reduce the noise to the same level as a frame that was
-            captured with longer exposure time. To be more specific, assuming the original captured
-            images were captured with a sensitivity of S and an exposure time of T, the model in
-            the camera device is that the amount of noise in the image would be approximately what
-            would be expected if the original capture parameters had been a sensitivity of
-            S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
-            than S and T respectively. If the captured images were processed by the application
-            before being sent for reprocessing, then the application may have used image processing
-            algorithms and/or multi-frame image fusion to reduce the noise in the
-            application-processed images (input images). By using the effectiveExposureFactor
-            control, the application can communicate to the camera device the actual noise level
-            improvement in the application-processed image. With this information, the camera
-            device can select appropriate noise reduction and edge enhancement parameters to avoid
-            excessive noise reduction (android.noiseReduction.mode) and insufficient edge
-            enhancement (android.edge.mode) being applied to the reprocessed frames.
-
-            For example, for the multi-frame image fusion use case, the application may fuse
-            multiple output frames together into a final frame for reprocessing. When N images are
-            fused into 1 image for reprocessing, the exposure time increase factor could be up to
-            square root of N (based on a simple photon shot noise model). The camera device will
-            adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
-            produce the best quality images.
-
-            This is a relative factor; 1.0 indicates the application hasn't processed the input
-            buffer in a way that affects its effective exposure time.
-
-            This control is only effective for YUV reprocessing capture requests. For noise
-            reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
-            Similarly, for edge enhancement reprocessing, it is only effective when
-            `android.edge.mode != OFF`.
-            </details>
-          <tag id="REPROC" />
-        </entry>
-      </controls>
-      <dynamic>
-      <clone entry="android.reprocess.effectiveExposureFactor" kind="controls">
-      </clone>
-      </dynamic>
-      <static>
-        <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited">
-          <description>
-          The maximal camera capture pipeline stall (in unit of frame count) introduced by a
-          reprocess capture request.
-          </description>
-          <units>Number of frames.</units>
-          <range> &amp;lt;= 4</range>
-          <details>
-          The key describes the maximal interference that one reprocess (input) request
-          can introduce to the camera simultaneous streaming of regular (output) capture
-          requests, including repeating requests.
-
-          When a reprocessing capture request is submitted while a camera output repeating request
-          (e.g. preview) is being served by the camera device, it may preempt the camera capture
-          pipeline for at least one frame duration so that the camera device is unable to process
-          the following capture request in time for the next sensor start of exposure boundary.
-          When this happens, the application may observe a capture time gap (longer than one frame
-          duration) between adjacent capture output frames, which usually exhibits as preview
-          glitch if the repeating request output targets include a preview surface. This key gives
-          the worst-case number of frame stalls introduced by one reprocess request with any kind
-          of format/size combination.
-
-          If this key reports 0, it means a reprocess request doesn't introduce any glitch to the
-          ongoing camera repeating request outputs, as if this reprocess request is never issued.
-
-          This key is supported if the camera device supports PRIVATE or YUV reprocessing (
-          i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or
-          YUV_REPROCESSING).
-          </details>
-          <tag id="REPROC" />
-        </entry>
-      </static>
-    </section>
-    <section name="depth">
-      <static>
-        <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited">
-          <description>Maximum number of points that a depth point cloud may contain.
-          </description>
-          <details>
-            If a camera device supports outputting depth range data in the form of a depth point
-            cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum
-            number of points an output buffer may contain.
-
-            Any given buffer may contain between 0 and maxDepthSamples points, inclusive.
-            If output in the depth point cloud format is not supported, this entry will
-            not be defined.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public"
-               enum="true" container="array" typedef="streamConfiguration" hwlevel="limited">
-          <array>
-            <size>n</size>
-            <size>4</size>
-          </array>
-          <enum>
-            <value>OUTPUT</value>
-            <value>INPUT</value>
-          </enum>
-          <description>The available depth dataspace stream
-          configurations that this camera device supports
-          (i.e. format, width, height, output/input stream).
-          </description>
-          <details>
-            These are output stream configurations for use with
-            dataSpace HAL_DATASPACE_DEPTH. The configurations are
-            listed as `(format, width, height, input?)` tuples.
-
-            Only devices that support depth output for at least
-            the HAL_PIXEL_FORMAT_Y16 dense depth map may include
-            this entry.
-
-            A device that also supports the HAL_PIXEL_FORMAT_BLOB
-            sparse depth point cloud must report a single entry for
-            the format in this list as `(HAL_PIXEL_FORMAT_BLOB,
-            android.depth.maxDepthSamples, 1, OUTPUT)` in addition to
-            the entries for HAL_PIXEL_FORMAT_Y16.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public"
-               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
-          <array>
-            <size>4</size>
-            <size>n</size>
-          </array>
-          <description>This lists the minimum frame duration for each
-          format/size combination for depth output formats.
-          </description>
-          <units>(format, width, height, ns) x n</units>
-          <details>
-          This should correspond to the frame duration when only that
-          stream is active, with all processing (typically in android.*.mode)
-          set to either OFF or FAST.
-
-          When multiple streams are used in a request, the minimum frame
-          duration will be max(individual stream min durations).
-
-          The minimum frame duration of a stream (of a particular format, size)
-          is the same regardless of whether the stream is input or output.
-
-          See android.sensor.frameDuration and
-          android.scaler.availableStallDurations for more details about
-          calculating the max frame rate.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public"
-               container="array" typedef="streamConfigurationDuration" hwlevel="limited">
-          <array>
-            <size>4</size>
-            <size>n</size>
-          </array>
-          <description>This lists the maximum stall duration for each
-          output format/size combination for depth streams.
-          </description>
-          <units>(format, width, height, ns) x n</units>
-          <details>
-          A stall duration is how much extra time would get added
-          to the normal minimum frame duration for a repeating request
-          that has streams with non-zero stall.
-
-          This functions similarly to
-          android.scaler.availableStallDurations for depth
-          streams.
-
-          All depth output stream formats may have a nonzero stall
-          duration.
-          </details>
-          <tag id="DEPTH" />
-        </entry>
-        <entry name="depthIsExclusive" type="byte" visibility="public"
-               enum="true" typedef="boolean" hwlevel="limited">
-          <enum>
-            <value>FALSE</value>
-            <value>TRUE</value>
-          </enum>
-          <description>Indicates whether a capture request may target both a
-          DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
-          YUV_420_888, JPEG, or RAW) simultaneously.
-          </description>
-          <details>
-          If TRUE, including both depth and color outputs in a single
-          capture request is not supported. An application must interleave color
-          and depth requests.  If FALSE, a single request can target both types
-          of output.
-
-          Typically, this restriction exists on camera devices that
-          need to emit a specific pattern or wavelength of light to
-          measure depth values, which causes the color image to be
-          corrupted during depth measurement.
-          </details>
-        </entry>
-      </static>
-    </section>
-  </namespace>
-</metadata>
index 9c2956f..adf18b8 100644 (file)
@@ -61,6 +61,8 @@ typedef enum camera_metadata_section {
     ANDROID_SYNC,
     ANDROID_REPROCESS,
     ANDROID_DEPTH,
+    ANDROID_LOGICAL_MULTI_CAMERA,
+    ANDROID_DISTORTION_CORRECTION,
     ANDROID_SECTION_COUNT,
 
     VENDOR_SECTION = 0x8000
@@ -97,6 +99,12 @@ typedef enum camera_metadata_section_start {
     ANDROID_SYNC_START             = ANDROID_SYNC              << 16,
     ANDROID_REPROCESS_START        = ANDROID_REPROCESS         << 16,
     ANDROID_DEPTH_START            = ANDROID_DEPTH             << 16,
+    ANDROID_LOGICAL_MULTI_CAMERA_START
+                                   = ANDROID_LOGICAL_MULTI_CAMERA
+                                                                << 16,
+    ANDROID_DISTORTION_CORRECTION_START
+                                   = ANDROID_DISTORTION_CORRECTION
+                                                                << 16,
     VENDOR_SECTION_START           = VENDOR_SECTION            << 16
 } camera_metadata_section_start_t;
 
@@ -107,313 +115,335 @@ typedef enum camera_metadata_section_start {
  * system/media/camera/src/camera_metadata_tag_info.c
  */
 typedef enum camera_metadata_tag {
-    ANDROID_COLOR_CORRECTION_MODE =                   // enum         | public
+    ANDROID_COLOR_CORRECTION_MODE =                   // enum         | public       | HIDL v3.2
             ANDROID_COLOR_CORRECTION_START,
-    ANDROID_COLOR_CORRECTION_TRANSFORM,               // rational[]   | public
-    ANDROID_COLOR_CORRECTION_GAINS,                   // float[]      | public
-    ANDROID_COLOR_CORRECTION_ABERRATION_MODE,         // enum         | public
+    ANDROID_COLOR_CORRECTION_TRANSFORM,               // rational[]   | public       | HIDL v3.2
+    ANDROID_COLOR_CORRECTION_GAINS,                   // float[]      | public       | HIDL v3.2
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE,         // enum         | public       | HIDL v3.2
     ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
-                                                      // byte[]       | public
+                                                      // byte[]       | public       | HIDL v3.2
     ANDROID_COLOR_CORRECTION_END,
 
-    ANDROID_CONTROL_AE_ANTIBANDING_MODE =             // enum         | public
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE =             // enum         | public       | HIDL v3.2
             ANDROID_CONTROL_START,
-    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,         // int32        | public
-    ANDROID_CONTROL_AE_LOCK,                          // enum         | public
-    ANDROID_CONTROL_AE_MODE,                          // enum         | public
-    ANDROID_CONTROL_AE_REGIONS,                       // int32[]      | public
-    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,              // int32[]      | public
-    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,            // enum         | public
-    ANDROID_CONTROL_AF_MODE,                          // enum         | public
-    ANDROID_CONTROL_AF_REGIONS,                       // int32[]      | public
-    ANDROID_CONTROL_AF_TRIGGER,                       // enum         | public
-    ANDROID_CONTROL_AWB_LOCK,                         // enum         | public
-    ANDROID_CONTROL_AWB_MODE,                         // enum         | public
-    ANDROID_CONTROL_AWB_REGIONS,                      // int32[]      | public
-    ANDROID_CONTROL_CAPTURE_INTENT,                   // enum         | public
-    ANDROID_CONTROL_EFFECT_MODE,                      // enum         | public
-    ANDROID_CONTROL_MODE,                             // enum         | public
-    ANDROID_CONTROL_SCENE_MODE,                       // enum         | public
-    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,         // enum         | public
-    ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,   // byte[]       | public
-    ANDROID_CONTROL_AE_AVAILABLE_MODES,               // byte[]       | public
-    ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,   // int32[]      | public
-    ANDROID_CONTROL_AE_COMPENSATION_RANGE,            // int32[]      | public
-    ANDROID_CONTROL_AE_COMPENSATION_STEP,             // rational     | public
-    ANDROID_CONTROL_AF_AVAILABLE_MODES,               // byte[]       | public
-    ANDROID_CONTROL_AVAILABLE_EFFECTS,                // byte[]       | public
-    ANDROID_CONTROL_AVAILABLE_SCENE_MODES,            // byte[]       | public
+    ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,         // int32        | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_LOCK,                          // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_MODE,                          // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_REGIONS,                       // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_TARGET_FPS_RANGE,              // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,            // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_MODE,                          // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_REGIONS,                       // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_TRIGGER,                       // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AWB_LOCK,                         // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE,                         // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AWB_REGIONS,                      // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT,                   // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE,                      // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_MODE,                             // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE,                       // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,         // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,   // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_AVAILABLE_MODES,               // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,   // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_COMPENSATION_RANGE,            // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_AE_COMPENSATION_STEP,             // rational     | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_AVAILABLE_MODES,               // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_AVAILABLE_EFFECTS,                // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_AVAILABLE_SCENE_MODES,            // byte[]       | public       | HIDL v3.2
     ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
-                                                      // byte[]       | public
-    ANDROID_CONTROL_AWB_AVAILABLE_MODES,              // byte[]       | public
-    ANDROID_CONTROL_MAX_REGIONS,                      // int32[]      | ndk_public
-    ANDROID_CONTROL_SCENE_MODE_OVERRIDES,             // byte[]       | system
-    ANDROID_CONTROL_AE_PRECAPTURE_ID,                 // int32        | system
-    ANDROID_CONTROL_AE_STATE,                         // enum         | public
-    ANDROID_CONTROL_AF_STATE,                         // enum         | public
-    ANDROID_CONTROL_AF_TRIGGER_ID,                    // int32        | system
-    ANDROID_CONTROL_AWB_STATE,                        // enum         | public
+                                                      // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_AWB_AVAILABLE_MODES,              // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_MAX_REGIONS,                      // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_OVERRIDES,             // byte[]       | system       | HIDL v3.2
+    ANDROID_CONTROL_AE_PRECAPTURE_ID,                 // int32        | system       | HIDL v3.2
+    ANDROID_CONTROL_AE_STATE,                         // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_STATE,                         // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_TRIGGER_ID,                    // int32        | system       | HIDL v3.2
+    ANDROID_CONTROL_AWB_STATE,                        // enum         | public       | HIDL v3.2
     ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
-                                                      // int32[]      | hidden
-    ANDROID_CONTROL_AE_LOCK_AVAILABLE,                // enum         | public
-    ANDROID_CONTROL_AWB_LOCK_AVAILABLE,               // enum         | public
-    ANDROID_CONTROL_AVAILABLE_MODES,                  // byte[]       | public
-    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, // int32[]      | public
-    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,       // int32        | public
-    ANDROID_CONTROL_ENABLE_ZSL,                       // enum         | public
+                                                      // int32[]      | hidden       | HIDL v3.2
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE,                // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE,               // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AVAILABLE_MODES,                  // byte[]       | public       | HIDL v3.2
+    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE, // int32[]      | public       | HIDL v3.2
+    ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,       // int32        | public       | HIDL v3.2
+    ANDROID_CONTROL_ENABLE_ZSL,                       // enum         | public       | HIDL v3.2
+    ANDROID_CONTROL_AF_SCENE_CHANGE,                  // enum         | public       | HIDL v3.3
     ANDROID_CONTROL_END,
 
-    ANDROID_DEMOSAIC_MODE =                           // enum         | system
+    ANDROID_DEMOSAIC_MODE =                           // enum         | system       | HIDL v3.2
             ANDROID_DEMOSAIC_START,
     ANDROID_DEMOSAIC_END,
 
-    ANDROID_EDGE_MODE =                               // enum         | public
+    ANDROID_EDGE_MODE =                               // enum         | public       | HIDL v3.2
             ANDROID_EDGE_START,
-    ANDROID_EDGE_STRENGTH,                            // byte         | system
-    ANDROID_EDGE_AVAILABLE_EDGE_MODES,                // byte[]       | public
+    ANDROID_EDGE_STRENGTH,                            // byte         | system       | HIDL v3.2
+    ANDROID_EDGE_AVAILABLE_EDGE_MODES,                // byte[]       | public       | HIDL v3.2
     ANDROID_EDGE_END,
 
-    ANDROID_FLASH_FIRING_POWER =                      // byte         | system
+    ANDROID_FLASH_FIRING_POWER =                      // byte         | system       | HIDL v3.2
             ANDROID_FLASH_START,
-    ANDROID_FLASH_FIRING_TIME,                        // int64        | system
-    ANDROID_FLASH_MODE,                               // enum         | public
-    ANDROID_FLASH_COLOR_TEMPERATURE,                  // byte         | system
-    ANDROID_FLASH_MAX_ENERGY,                         // byte         | system
-    ANDROID_FLASH_STATE,                              // enum         | public
+    ANDROID_FLASH_FIRING_TIME,                        // int64        | system       | HIDL v3.2
+    ANDROID_FLASH_MODE,                               // enum         | public       | HIDL v3.2
+    ANDROID_FLASH_COLOR_TEMPERATURE,                  // byte         | system       | HIDL v3.2
+    ANDROID_FLASH_MAX_ENERGY,                         // byte         | system       | HIDL v3.2
+    ANDROID_FLASH_STATE,                              // enum         | public       | HIDL v3.2
     ANDROID_FLASH_END,
 
-    ANDROID_FLASH_INFO_AVAILABLE =                    // enum         | public
+    ANDROID_FLASH_INFO_AVAILABLE =                    // enum         | public       | HIDL v3.2
             ANDROID_FLASH_INFO_START,
-    ANDROID_FLASH_INFO_CHARGE_DURATION,               // int64        | system
+    ANDROID_FLASH_INFO_CHARGE_DURATION,               // int64        | system       | HIDL v3.2
     ANDROID_FLASH_INFO_END,
 
-    ANDROID_HOT_PIXEL_MODE =                          // enum         | public
+    ANDROID_HOT_PIXEL_MODE =                          // enum         | public       | HIDL v3.2
             ANDROID_HOT_PIXEL_START,
-    ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,      // byte[]       | public
+    ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,      // byte[]       | public       | HIDL v3.2
     ANDROID_HOT_PIXEL_END,
 
-    ANDROID_JPEG_GPS_COORDINATES =                    // double[]     | ndk_public
+    ANDROID_JPEG_GPS_COORDINATES =                    // double[]     | ndk_public   | HIDL v3.2
             ANDROID_JPEG_START,
-    ANDROID_JPEG_GPS_PROCESSING_METHOD,               // byte         | ndk_public
-    ANDROID_JPEG_GPS_TIMESTAMP,                       // int64        | ndk_public
-    ANDROID_JPEG_ORIENTATION,                         // int32        | public
-    ANDROID_JPEG_QUALITY,                             // byte         | public
-    ANDROID_JPEG_THUMBNAIL_QUALITY,                   // byte         | public
-    ANDROID_JPEG_THUMBNAIL_SIZE,                      // int32[]      | public
-    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,           // int32[]      | public
-    ANDROID_JPEG_MAX_SIZE,                            // int32        | system
-    ANDROID_JPEG_SIZE,                                // int32        | system
+    ANDROID_JPEG_GPS_PROCESSING_METHOD,               // byte         | ndk_public   | HIDL v3.2
+    ANDROID_JPEG_GPS_TIMESTAMP,                       // int64        | ndk_public   | HIDL v3.2
+    ANDROID_JPEG_ORIENTATION,                         // int32        | public       | HIDL v3.2
+    ANDROID_JPEG_QUALITY,                             // byte         | public       | HIDL v3.2
+    ANDROID_JPEG_THUMBNAIL_QUALITY,                   // byte         | public       | HIDL v3.2
+    ANDROID_JPEG_THUMBNAIL_SIZE,                      // int32[]      | public       | HIDL v3.2
+    ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,           // int32[]      | public       | HIDL v3.2
+    ANDROID_JPEG_MAX_SIZE,                            // int32        | system       | HIDL v3.2
+    ANDROID_JPEG_SIZE,                                // int32        | system       | HIDL v3.2
     ANDROID_JPEG_END,
 
-    ANDROID_LENS_APERTURE =                           // float        | public
+    ANDROID_LENS_APERTURE =                           // float        | public       | HIDL v3.2
             ANDROID_LENS_START,
-    ANDROID_LENS_FILTER_DENSITY,                      // float        | public
-    ANDROID_LENS_FOCAL_LENGTH,                        // float        | public
-    ANDROID_LENS_FOCUS_DISTANCE,                      // float        | public
-    ANDROID_LENS_OPTICAL_STABILIZATION_MODE,          // enum         | public
-    ANDROID_LENS_FACING,                              // enum         | public
-    ANDROID_LENS_POSE_ROTATION,                       // float[]      | public
-    ANDROID_LENS_POSE_TRANSLATION,                    // float[]      | public
-    ANDROID_LENS_FOCUS_RANGE,                         // float[]      | public
-    ANDROID_LENS_STATE,                               // enum         | public
-    ANDROID_LENS_INTRINSIC_CALIBRATION,               // float[]      | public
-    ANDROID_LENS_RADIAL_DISTORTION,                   // float[]      | public
+    ANDROID_LENS_FILTER_DENSITY,                      // float        | public       | HIDL v3.2
+    ANDROID_LENS_FOCAL_LENGTH,                        // float        | public       | HIDL v3.2
+    ANDROID_LENS_FOCUS_DISTANCE,                      // float        | public       | HIDL v3.2
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE,          // enum         | public       | HIDL v3.2
+    ANDROID_LENS_FACING,                              // enum         | public       | HIDL v3.2
+    ANDROID_LENS_POSE_ROTATION,                       // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_POSE_TRANSLATION,                    // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_FOCUS_RANGE,                         // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_STATE,                               // enum         | public       | HIDL v3.2
+    ANDROID_LENS_INTRINSIC_CALIBRATION,               // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_RADIAL_DISTORTION,                   // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_POSE_REFERENCE,                      // enum         | public       | HIDL v3.3
+    ANDROID_LENS_DISTORTION,                          // float[]      | public       | HIDL v3.3
     ANDROID_LENS_END,
 
-    ANDROID_LENS_INFO_AVAILABLE_APERTURES =           // float[]      | public
+    ANDROID_LENS_INFO_AVAILABLE_APERTURES =           // float[]      | public       | HIDL v3.2
             ANDROID_LENS_INFO_START,
-    ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,     // float[]      | public
-    ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,        // float[]      | public
-    ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,// byte[]       | public
-    ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,            // float        | public
-    ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,         // float        | public
-    ANDROID_LENS_INFO_SHADING_MAP_SIZE,               // int32[]      | ndk_public
-    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,     // enum         | public
+    ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,     // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,        // float[]      | public       | HIDL v3.2
+    ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,// byte[]       | public       | HIDL v3.2
+    ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,            // float        | public       | HIDL v3.2
+    ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,         // float        | public       | HIDL v3.2
+    ANDROID_LENS_INFO_SHADING_MAP_SIZE,               // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,     // enum         | public       | HIDL v3.2
     ANDROID_LENS_INFO_END,
 
-    ANDROID_NOISE_REDUCTION_MODE =                    // enum         | public
+    ANDROID_NOISE_REDUCTION_MODE =                    // enum         | public       | HIDL v3.2
             ANDROID_NOISE_REDUCTION_START,
-    ANDROID_NOISE_REDUCTION_STRENGTH,                 // byte         | system
+    ANDROID_NOISE_REDUCTION_STRENGTH,                 // byte         | system       | HIDL v3.2
     ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
-                                                      // byte[]       | public
+                                                      // byte[]       | public       | HIDL v3.2
     ANDROID_NOISE_REDUCTION_END,
 
-    ANDROID_QUIRKS_METERING_CROP_REGION =             // byte         | system
+    ANDROID_QUIRKS_METERING_CROP_REGION =             // byte         | system       | HIDL v3.2
             ANDROID_QUIRKS_START,
-    ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,              // byte         | system
-    ANDROID_QUIRKS_USE_ZSL_FORMAT,                    // byte         | system
-    ANDROID_QUIRKS_USE_PARTIAL_RESULT,                // byte         | hidden
-    ANDROID_QUIRKS_PARTIAL_RESULT,                    // enum         | hidden
+    ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,              // byte         | system       | HIDL v3.2
+    ANDROID_QUIRKS_USE_ZSL_FORMAT,                    // byte         | system       | HIDL v3.2
+    ANDROID_QUIRKS_USE_PARTIAL_RESULT,                // byte         | hidden       | HIDL v3.2
+    ANDROID_QUIRKS_PARTIAL_RESULT,                    // enum         | hidden       | HIDL v3.2
     ANDROID_QUIRKS_END,
 
-    ANDROID_REQUEST_FRAME_COUNT =                     // int32        | hidden
+    ANDROID_REQUEST_FRAME_COUNT =                     // int32        | hidden       | HIDL v3.2
             ANDROID_REQUEST_START,
-    ANDROID_REQUEST_ID,                               // int32        | hidden
-    ANDROID_REQUEST_INPUT_STREAMS,                    // int32[]      | system
-    ANDROID_REQUEST_METADATA_MODE,                    // enum         | system
-    ANDROID_REQUEST_OUTPUT_STREAMS,                   // int32[]      | system
-    ANDROID_REQUEST_TYPE,                             // enum         | system
-    ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,           // int32[]      | ndk_public
-    ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,        // int32[]      | system
-    ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,            // int32        | public
-    ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public
-    ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public
-    ANDROID_REQUEST_PARTIAL_RESULT_COUNT,             // int32        | public
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES,           // enum[]       | public
-    ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,           // int32[]      | ndk_public
-    ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,            // int32[]      | ndk_public
-    ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,   // int32[]      | ndk_public
+    ANDROID_REQUEST_ID,                               // int32        | hidden       | HIDL v3.2
+    ANDROID_REQUEST_INPUT_STREAMS,                    // int32[]      | system       | HIDL v3.2
+    ANDROID_REQUEST_METADATA_MODE,                    // enum         | system       | HIDL v3.2
+    ANDROID_REQUEST_OUTPUT_STREAMS,                   // int32[]      | system       | HIDL v3.2
+    ANDROID_REQUEST_TYPE,                             // enum         | system       | HIDL v3.2
+    ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,           // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,        // int32[]      | system       | HIDL v3.2
+    ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,            // int32        | java_public  | HIDL v3.2
+    ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public       | HIDL v3.2
+    ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public       | HIDL v3.2
+    ANDROID_REQUEST_PARTIAL_RESULT_COUNT,             // int32        | public       | HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES,           // enum[]       | public       | HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,           // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,            // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,   // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,           // int32[]      | ndk_public   | HIDL v3.3
+    ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
+                                                      // int32[]      | hidden       | HIDL v3.3
     ANDROID_REQUEST_END,
 
-    ANDROID_SCALER_CROP_REGION =                      // int32[]      | public
+    ANDROID_SCALER_CROP_REGION =                      // int32[]      | public       | HIDL v3.2
             ANDROID_SCALER_START,
-    ANDROID_SCALER_AVAILABLE_FORMATS,                 // enum[]       | hidden
-    ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,      // int64[]      | hidden
-    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,              // int32[]      | hidden
-    ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,        // float        | public
-    ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, // int64[]      | hidden
-    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,         // int32[]      | hidden
-    ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,       // int64[]      | system
-    ANDROID_SCALER_AVAILABLE_RAW_SIZES,               // int32[]      | system
-    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32        | hidden
-    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,   // enum[]       | ndk_public
-    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,     // int64[]      | ndk_public
-    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,         // int64[]      | ndk_public
-    ANDROID_SCALER_CROPPING_TYPE,                     // enum         | public
+    ANDROID_SCALER_AVAILABLE_FORMATS,                 // enum[]       | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,      // int64[]      | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,              // int32[]      | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,        // float        | public       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, // int64[]      | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,         // int32[]      | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,       // int64[]      | system       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_RAW_SIZES,               // int32[]      | system       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32        | hidden       | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,   // enum[]       | ndk_public   | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,     // int64[]      | ndk_public   | HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,         // int64[]      | ndk_public   | HIDL v3.2
+    ANDROID_SCALER_CROPPING_TYPE,                     // enum         | public       | HIDL v3.2
     ANDROID_SCALER_END,
 
-    ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public
+    ANDROID_SENSOR_EXPOSURE_TIME =                    // int64        | public       | HIDL v3.2
             ANDROID_SENSOR_START,
-    ANDROID_SENSOR_FRAME_DURATION,                    // int64        | public
-    ANDROID_SENSOR_SENSITIVITY,                       // int32        | public
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1,             // enum         | public
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | public
-    ANDROID_SENSOR_CALIBRATION_TRANSFORM1,            // rational[]   | public
-    ANDROID_SENSOR_CALIBRATION_TRANSFORM2,            // rational[]   | public
-    ANDROID_SENSOR_COLOR_TRANSFORM1,                  // rational[]   | public
-    ANDROID_SENSOR_COLOR_TRANSFORM2,                  // rational[]   | public
-    ANDROID_SENSOR_FORWARD_MATRIX1,                   // rational[]   | public
-    ANDROID_SENSOR_FORWARD_MATRIX2,                   // rational[]   | public
-    ANDROID_SENSOR_BASE_GAIN_FACTOR,                  // rational     | system
-    ANDROID_SENSOR_BLACK_LEVEL_PATTERN,               // int32[]      | public
-    ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,            // int32        | public
-    ANDROID_SENSOR_ORIENTATION,                       // int32        | public
-    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,    // int32[]      | system
-    ANDROID_SENSOR_TIMESTAMP,                         // int64        | public
-    ANDROID_SENSOR_TEMPERATURE,                       // float        | system
-    ANDROID_SENSOR_NEUTRAL_COLOR_POINT,               // rational[]   | public
-    ANDROID_SENSOR_NOISE_PROFILE,                     // double[]     | public
-    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,               // float[]      | system
-    ANDROID_SENSOR_PROFILE_TONE_CURVE,                // float[]      | system
-    ANDROID_SENSOR_GREEN_SPLIT,                       // float        | public
-    ANDROID_SENSOR_TEST_PATTERN_DATA,                 // int32[]      | public
-    ANDROID_SENSOR_TEST_PATTERN_MODE,                 // enum         | public
-    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,      // int32[]      | public
-    ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,              // int64        | public
-    ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,             // int32[]      | public
-    ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,               // float[]      | public
-    ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,               // int32        | public
-    ANDROID_SENSOR_OPAQUE_RAW_SIZE,                   // int32[]      | system
+    ANDROID_SENSOR_FRAME_DURATION,                    // int64        | public       | HIDL v3.2
+    ANDROID_SENSOR_SENSITIVITY,                       // int32        | public       | HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1,             // enum         | public       | HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | public       | HIDL v3.2
+    ANDROID_SENSOR_CALIBRATION_TRANSFORM1,            // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_CALIBRATION_TRANSFORM2,            // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_COLOR_TRANSFORM1,                  // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_COLOR_TRANSFORM2,                  // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_FORWARD_MATRIX1,                   // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_FORWARD_MATRIX2,                   // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_BASE_GAIN_FACTOR,                  // rational     | system       | HIDL v3.2
+    ANDROID_SENSOR_BLACK_LEVEL_PATTERN,               // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,            // int32        | public       | HIDL v3.2
+    ANDROID_SENSOR_ORIENTATION,                       // int32        | public       | HIDL v3.2
+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,    // int32[]      | system       | HIDL v3.2
+    ANDROID_SENSOR_TIMESTAMP,                         // int64        | public       | HIDL v3.2
+    ANDROID_SENSOR_TEMPERATURE,                       // float        | system       | HIDL v3.2
+    ANDROID_SENSOR_NEUTRAL_COLOR_POINT,               // rational[]   | public       | HIDL v3.2
+    ANDROID_SENSOR_NOISE_PROFILE,                     // double[]     | public       | HIDL v3.2
+    ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,               // float[]      | system       | HIDL v3.2
+    ANDROID_SENSOR_PROFILE_TONE_CURVE,                // float[]      | system       | HIDL v3.2
+    ANDROID_SENSOR_GREEN_SPLIT,                       // float        | public       | HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_DATA,                 // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE,                 // enum         | public       | HIDL v3.2
+    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,      // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,              // int64        | public       | HIDL v3.2
+    ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,             // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,               // float[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,               // int32        | public       | HIDL v3.2
+    ANDROID_SENSOR_OPAQUE_RAW_SIZE,                   // int32[]      | system       | HIDL v3.2
     ANDROID_SENSOR_END,
 
-    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public
+    ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public       | HIDL v3.2
             ANDROID_SENSOR_INFO_START,
-    ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,            // int32[]      | public
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,     // enum         | public
-    ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,          // int64[]      | public
-    ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,           // int64        | public
-    ANDROID_SENSOR_INFO_PHYSICAL_SIZE,                // float[]      | public
-    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | public
-    ANDROID_SENSOR_INFO_WHITE_LEVEL,                  // int32        | public
-    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,             // enum         | public
-    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,         // enum         | public
+    ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,            // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,     // enum         | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,          // int64[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,           // int64        | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_PHYSICAL_SIZE,                // float[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,             // int32[]      | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_WHITE_LEVEL,                  // int32        | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,             // enum         | public       | HIDL v3.2
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,         // enum         | public       | HIDL v3.2
     ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
-                                                      // int32[]      | public
+                                                      // int32[]      | public       | HIDL v3.2
     ANDROID_SENSOR_INFO_END,
 
-    ANDROID_SHADING_MODE =                            // enum         | public
+    ANDROID_SHADING_MODE =                            // enum         | public       | HIDL v3.2
             ANDROID_SHADING_START,
-    ANDROID_SHADING_STRENGTH,                         // byte         | system
-    ANDROID_SHADING_AVAILABLE_MODES,                  // byte[]       | public
+    ANDROID_SHADING_STRENGTH,                         // byte         | system       | HIDL v3.2
+    ANDROID_SHADING_AVAILABLE_MODES,                  // byte[]       | public       | HIDL v3.2
     ANDROID_SHADING_END,
 
-    ANDROID_STATISTICS_FACE_DETECT_MODE =             // enum         | public
+    ANDROID_STATISTICS_FACE_DETECT_MODE =             // enum         | public       | HIDL v3.2
             ANDROID_STATISTICS_START,
-    ANDROID_STATISTICS_HISTOGRAM_MODE,                // enum         | system
-    ANDROID_STATISTICS_SHARPNESS_MAP_MODE,            // enum         | system
-    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,            // enum         | public
-    ANDROID_STATISTICS_FACE_IDS,                      // int32[]      | ndk_public
-    ANDROID_STATISTICS_FACE_LANDMARKS,                // int32[]      | ndk_public
-    ANDROID_STATISTICS_FACE_RECTANGLES,               // int32[]      | ndk_public
-    ANDROID_STATISTICS_FACE_SCORES,                   // byte[]       | ndk_public
-    ANDROID_STATISTICS_HISTOGRAM,                     // int32[]      | system
-    ANDROID_STATISTICS_SHARPNESS_MAP,                 // int32[]      | system
-    ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,   // byte         | java_public
-    ANDROID_STATISTICS_LENS_SHADING_MAP,              // float[]      | ndk_public
-    ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,         // float[]      | hidden
-    ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,     // rational[]   | hidden
-    ANDROID_STATISTICS_SCENE_FLICKER,                 // enum         | public
-    ANDROID_STATISTICS_HOT_PIXEL_MAP,                 // int32[]      | public
-    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,         // enum         | public
+    ANDROID_STATISTICS_HISTOGRAM_MODE,                // enum         | system       | HIDL v3.2
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE,            // enum         | system       | HIDL v3.2
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,            // enum         | public       | HIDL v3.2
+    ANDROID_STATISTICS_FACE_IDS,                      // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_STATISTICS_FACE_LANDMARKS,                // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_STATISTICS_FACE_RECTANGLES,               // int32[]      | ndk_public   | HIDL v3.2
+    ANDROID_STATISTICS_FACE_SCORES,                   // byte[]       | ndk_public   | HIDL v3.2
+    ANDROID_STATISTICS_HISTOGRAM,                     // int32[]      | system       | HIDL v3.2
+    ANDROID_STATISTICS_SHARPNESS_MAP,                 // int32[]      | system       | HIDL v3.2
+    ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,   // byte         | java_public  | HIDL v3.2
+    ANDROID_STATISTICS_LENS_SHADING_MAP,              // float[]      | ndk_public   | HIDL v3.2
+    ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,         // float[]      | hidden       | HIDL v3.2
+    ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,     // rational[]   | hidden       | HIDL v3.2
+    ANDROID_STATISTICS_SCENE_FLICKER,                 // enum         | public       | HIDL v3.2
+    ANDROID_STATISTICS_HOT_PIXEL_MAP,                 // int32[]      | public       | HIDL v3.2
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,         // enum         | public       | HIDL v3.2
+    ANDROID_STATISTICS_OIS_DATA_MODE,                 // enum         | public       | HIDL v3.3
+    ANDROID_STATISTICS_OIS_TIMESTAMPS,                // int64[]      | ndk_public   | HIDL v3.3
+    ANDROID_STATISTICS_OIS_X_SHIFTS,                  // float[]      | ndk_public   | HIDL v3.3
+    ANDROID_STATISTICS_OIS_Y_SHIFTS,                  // float[]      | ndk_public   | HIDL v3.3
     ANDROID_STATISTICS_END,
 
     ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = 
-                                                      // byte[]       | public
+                                                      // byte[]       | public       | HIDL v3.2
             ANDROID_STATISTICS_INFO_START,
-    ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,   // int32        | system
-    ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,           // int32        | public
-    ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,      // int32        | system
-    ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,  // int32        | system
-    ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,       // int32[]      | system
+    ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,   // int32        | system       | HIDL v3.2
+    ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,           // int32        | public       | HIDL v3.2
+    ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,      // int32        | system       | HIDL v3.2
+    ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,  // int32        | system       | HIDL v3.2
+    ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,       // int32[]      | system       | HIDL v3.2
     ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
-                                                      // byte[]       | public
+                                                      // byte[]       | public       | HIDL v3.2
     ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
-                                                      // byte[]       | public
+                                                      // byte[]       | public       | HIDL v3.2
+    ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES, // byte[]       | public       | HIDL v3.3
     ANDROID_STATISTICS_INFO_END,
 
-    ANDROID_TONEMAP_CURVE_BLUE =                      // float[]      | ndk_public
+    ANDROID_TONEMAP_CURVE_BLUE =                      // float[]      | ndk_public   | HIDL v3.2
             ANDROID_TONEMAP_START,
-    ANDROID_TONEMAP_CURVE_GREEN,                      // float[]      | ndk_public
-    ANDROID_TONEMAP_CURVE_RED,                        // float[]      | ndk_public
-    ANDROID_TONEMAP_MODE,                             // enum         | public
-    ANDROID_TONEMAP_MAX_CURVE_POINTS,                 // int32        | public
-    ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,         // byte[]       | public
-    ANDROID_TONEMAP_GAMMA,                            // float        | public
-    ANDROID_TONEMAP_PRESET_CURVE,                     // enum         | public
+    ANDROID_TONEMAP_CURVE_GREEN,                      // float[]      | ndk_public   | HIDL v3.2
+    ANDROID_TONEMAP_CURVE_RED,                        // float[]      | ndk_public   | HIDL v3.2
+    ANDROID_TONEMAP_MODE,                             // enum         | public       | HIDL v3.2
+    ANDROID_TONEMAP_MAX_CURVE_POINTS,                 // int32        | public       | HIDL v3.2
+    ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,         // byte[]       | public       | HIDL v3.2
+    ANDROID_TONEMAP_GAMMA,                            // float        | public       | HIDL v3.2
+    ANDROID_TONEMAP_PRESET_CURVE,                     // enum         | public       | HIDL v3.2
     ANDROID_TONEMAP_END,
 
-    ANDROID_LED_TRANSMIT =                            // enum         | hidden
+    ANDROID_LED_TRANSMIT =                            // enum         | hidden       | HIDL v3.2
             ANDROID_LED_START,
-    ANDROID_LED_AVAILABLE_LEDS,                       // enum[]       | hidden
+    ANDROID_LED_AVAILABLE_LEDS,                       // enum[]       | hidden       | HIDL v3.2
     ANDROID_LED_END,
 
-    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL =           // enum         | public
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL =           // enum         | public       | HIDL v3.2
             ANDROID_INFO_START,
+    ANDROID_INFO_VERSION,                             // byte         | public       | HIDL v3.3
     ANDROID_INFO_END,
 
-    ANDROID_BLACK_LEVEL_LOCK =                        // enum         | public
+    ANDROID_BLACK_LEVEL_LOCK =                        // enum         | public       | HIDL v3.2
             ANDROID_BLACK_LEVEL_START,
     ANDROID_BLACK_LEVEL_END,
 
-    ANDROID_SYNC_FRAME_NUMBER =                       // enum         | ndk_public
+    ANDROID_SYNC_FRAME_NUMBER =                       // enum         | ndk_public   | HIDL v3.2
             ANDROID_SYNC_START,
-    ANDROID_SYNC_MAX_LATENCY,                         // enum         | public
+    ANDROID_SYNC_MAX_LATENCY,                         // enum         | public       | HIDL v3.2
     ANDROID_SYNC_END,
 
-    ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =     // float        | java_public
+    ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =     // float        | java_public  | HIDL v3.2
             ANDROID_REPROCESS_START,
-    ANDROID_REPROCESS_MAX_CAPTURE_STALL,              // int32        | java_public
+    ANDROID_REPROCESS_MAX_CAPTURE_STALL,              // int32        | java_public  | HIDL v3.2
     ANDROID_REPROCESS_END,
 
-    ANDROID_DEPTH_MAX_DEPTH_SAMPLES =                 // int32        | system
+    ANDROID_DEPTH_MAX_DEPTH_SAMPLES =                 // int32        | system       | HIDL v3.2
             ANDROID_DEPTH_START,
     ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
-                                                      // enum[]       | ndk_public
-    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,// int64[]      | ndk_public
-    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,    // int64[]      | ndk_public
-    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,                 // enum         | public
+                                                      // enum[]       | ndk_public   | HIDL v3.2
+    ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,// int64[]      | ndk_public   | HIDL v3.2
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,    // int64[]      | ndk_public   | HIDL v3.2
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,                 // enum         | public       | HIDL v3.2
     ANDROID_DEPTH_END,
 
+    ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS =       // byte[]       | hidden       | HIDL v3.3
+            ANDROID_LOGICAL_MULTI_CAMERA_START,
+    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE,    // enum         | public       | HIDL v3.3
+    ANDROID_LOGICAL_MULTI_CAMERA_END,
+
+    ANDROID_DISTORTION_CORRECTION_MODE =              // enum         | public       | HIDL v3.3
+            ANDROID_DISTORTION_CORRECTION_START,
+    ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES,    // byte[]       | public       | HIDL v3.3
+    ANDROID_DISTORTION_CORRECTION_END,
+
 } camera_metadata_tag_t;
 
 /**
@@ -422,514 +452,554 @@ typedef enum camera_metadata_tag {
 
 // ANDROID_COLOR_CORRECTION_MODE
 typedef enum camera_metadata_enum_android_color_correction_mode {
-    ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX,
-    ANDROID_COLOR_CORRECTION_MODE_FAST,
-    ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY,
+    ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX                  , // HIDL v3.2
+    ANDROID_COLOR_CORRECTION_MODE_FAST                              , // HIDL v3.2
+    ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY                      , // HIDL v3.2
 } camera_metadata_enum_android_color_correction_mode_t;
 
 // ANDROID_COLOR_CORRECTION_ABERRATION_MODE
 typedef enum camera_metadata_enum_android_color_correction_aberration_mode {
-    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
-    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
-    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF                    , // HIDL v3.2
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST                   , // HIDL v3.2
+    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY           , // HIDL v3.2
 } camera_metadata_enum_android_color_correction_aberration_mode_t;
 
 
 // ANDROID_CONTROL_AE_ANTIBANDING_MODE
 typedef enum camera_metadata_enum_android_control_ae_antibanding_mode {
-    ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
-    ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
-    ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
-    ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF                         , // HIDL v3.2
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ                        , // HIDL v3.2
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ                        , // HIDL v3.2
+    ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO                        , // HIDL v3.2
 } camera_metadata_enum_android_control_ae_antibanding_mode_t;
 
 // ANDROID_CONTROL_AE_LOCK
 typedef enum camera_metadata_enum_android_control_ae_lock {
-    ANDROID_CONTROL_AE_LOCK_OFF,
-    ANDROID_CONTROL_AE_LOCK_ON,
+    ANDROID_CONTROL_AE_LOCK_OFF                                     , // HIDL v3.2
+    ANDROID_CONTROL_AE_LOCK_ON                                      , // HIDL v3.2
 } camera_metadata_enum_android_control_ae_lock_t;
 
 // ANDROID_CONTROL_AE_MODE
 typedef enum camera_metadata_enum_android_control_ae_mode {
-    ANDROID_CONTROL_AE_MODE_OFF,
-    ANDROID_CONTROL_AE_MODE_ON,
-    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
-    ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,
-    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE,
+    ANDROID_CONTROL_AE_MODE_OFF                                     , // HIDL v3.2
+    ANDROID_CONTROL_AE_MODE_ON                                      , // HIDL v3.2
+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH                           , // HIDL v3.2
+    ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH                         , // HIDL v3.2
+    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE                    , // HIDL v3.2
+    ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH                       , // HIDL v3.3
 } camera_metadata_enum_android_control_ae_mode_t;
 
 // ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER
 typedef enum camera_metadata_enum_android_control_ae_precapture_trigger {
-    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE,
-    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START,
-    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL,
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE                      , // HIDL v3.2
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START                     , // HIDL v3.2
+    ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL                    , // HIDL v3.2
 } camera_metadata_enum_android_control_ae_precapture_trigger_t;
 
 // ANDROID_CONTROL_AF_MODE
 typedef enum camera_metadata_enum_android_control_af_mode {
-    ANDROID_CONTROL_AF_MODE_OFF,
-    ANDROID_CONTROL_AF_MODE_AUTO,
-    ANDROID_CONTROL_AF_MODE_MACRO,
-    ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
-    ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,
-    ANDROID_CONTROL_AF_MODE_EDOF,
+    ANDROID_CONTROL_AF_MODE_OFF                                     , // HIDL v3.2
+    ANDROID_CONTROL_AF_MODE_AUTO                                    , // HIDL v3.2
+    ANDROID_CONTROL_AF_MODE_MACRO                                   , // HIDL v3.2
+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO                        , // HIDL v3.2
+    ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE                      , // HIDL v3.2
+    ANDROID_CONTROL_AF_MODE_EDOF                                    , // HIDL v3.2
 } camera_metadata_enum_android_control_af_mode_t;
 
 // ANDROID_CONTROL_AF_TRIGGER
 typedef enum camera_metadata_enum_android_control_af_trigger {
-    ANDROID_CONTROL_AF_TRIGGER_IDLE,
-    ANDROID_CONTROL_AF_TRIGGER_START,
-    ANDROID_CONTROL_AF_TRIGGER_CANCEL,
+    ANDROID_CONTROL_AF_TRIGGER_IDLE                                 , // HIDL v3.2
+    ANDROID_CONTROL_AF_TRIGGER_START                                , // HIDL v3.2
+    ANDROID_CONTROL_AF_TRIGGER_CANCEL                               , // HIDL v3.2
 } camera_metadata_enum_android_control_af_trigger_t;
 
 // ANDROID_CONTROL_AWB_LOCK
 typedef enum camera_metadata_enum_android_control_awb_lock {
-    ANDROID_CONTROL_AWB_LOCK_OFF,
-    ANDROID_CONTROL_AWB_LOCK_ON,
+    ANDROID_CONTROL_AWB_LOCK_OFF                                    , // HIDL v3.2
+    ANDROID_CONTROL_AWB_LOCK_ON                                     , // HIDL v3.2
 } camera_metadata_enum_android_control_awb_lock_t;
 
 // ANDROID_CONTROL_AWB_MODE
 typedef enum camera_metadata_enum_android_control_awb_mode {
-    ANDROID_CONTROL_AWB_MODE_OFF,
-    ANDROID_CONTROL_AWB_MODE_AUTO,
-    ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
-    ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
-    ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,
-    ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
-    ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,
-    ANDROID_CONTROL_AWB_MODE_TWILIGHT,
-    ANDROID_CONTROL_AWB_MODE_SHADE,
+    ANDROID_CONTROL_AWB_MODE_OFF                                    , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_AUTO                                   , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_INCANDESCENT                           , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_FLUORESCENT                            , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT                       , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_DAYLIGHT                               , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT                        , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_TWILIGHT                               , // HIDL v3.2
+    ANDROID_CONTROL_AWB_MODE_SHADE                                  , // HIDL v3.2
 } camera_metadata_enum_android_control_awb_mode_t;
 
 // ANDROID_CONTROL_CAPTURE_INTENT
 typedef enum camera_metadata_enum_android_control_capture_intent {
-    ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM,
-    ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW,
-    ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE,
-    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD,
-    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT,
-    ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG,
-    ANDROID_CONTROL_CAPTURE_INTENT_MANUAL,
+    ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM                           , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW                          , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE                    , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD                     , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT                   , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG                 , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_MANUAL                           , // HIDL v3.2
+    ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING                  , // HIDL v3.3
 } camera_metadata_enum_android_control_capture_intent_t;
 
 // ANDROID_CONTROL_EFFECT_MODE
 typedef enum camera_metadata_enum_android_control_effect_mode {
-    ANDROID_CONTROL_EFFECT_MODE_OFF,
-    ANDROID_CONTROL_EFFECT_MODE_MONO,
-    ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,
-    ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,
-    ANDROID_CONTROL_EFFECT_MODE_SEPIA,
-    ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,
-    ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD,
-    ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD,
-    ANDROID_CONTROL_EFFECT_MODE_AQUA,
+    ANDROID_CONTROL_EFFECT_MODE_OFF                                 , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_MONO                                , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_NEGATIVE                            , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_SOLARIZE                            , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_SEPIA                               , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_POSTERIZE                           , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD                          , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD                          , // HIDL v3.2
+    ANDROID_CONTROL_EFFECT_MODE_AQUA                                , // HIDL v3.2
 } camera_metadata_enum_android_control_effect_mode_t;
 
 // ANDROID_CONTROL_MODE
 typedef enum camera_metadata_enum_android_control_mode {
-    ANDROID_CONTROL_MODE_OFF,
-    ANDROID_CONTROL_MODE_AUTO,
-    ANDROID_CONTROL_MODE_USE_SCENE_MODE,
-    ANDROID_CONTROL_MODE_OFF_KEEP_STATE,
+    ANDROID_CONTROL_MODE_OFF                                        , // HIDL v3.2
+    ANDROID_CONTROL_MODE_AUTO                                       , // HIDL v3.2
+    ANDROID_CONTROL_MODE_USE_SCENE_MODE                             , // HIDL v3.2
+    ANDROID_CONTROL_MODE_OFF_KEEP_STATE                             , // HIDL v3.2
 } camera_metadata_enum_android_control_mode_t;
 
 // ANDROID_CONTROL_SCENE_MODE
 typedef enum camera_metadata_enum_android_control_scene_mode {
-    ANDROID_CONTROL_SCENE_MODE_DISABLED                         = 0,
-    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,
-    ANDROID_CONTROL_SCENE_MODE_ACTION,
-    ANDROID_CONTROL_SCENE_MODE_PORTRAIT,
-    ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,
-    ANDROID_CONTROL_SCENE_MODE_NIGHT,
-    ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
-    ANDROID_CONTROL_SCENE_MODE_THEATRE,
-    ANDROID_CONTROL_SCENE_MODE_BEACH,
-    ANDROID_CONTROL_SCENE_MODE_SNOW,
-    ANDROID_CONTROL_SCENE_MODE_SUNSET,
-    ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,
-    ANDROID_CONTROL_SCENE_MODE_FIREWORKS,
-    ANDROID_CONTROL_SCENE_MODE_SPORTS,
-    ANDROID_CONTROL_SCENE_MODE_PARTY,
-    ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,
-    ANDROID_CONTROL_SCENE_MODE_BARCODE,
-    ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO,
-    ANDROID_CONTROL_SCENE_MODE_HDR,
-    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT,
-    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START              = 100,
-    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END                = 127,
+    ANDROID_CONTROL_SCENE_MODE_DISABLED                              = 0, // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY                        , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_ACTION                               , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_PORTRAIT                             , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_LANDSCAPE                            , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_NIGHT                                , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT                       , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_THEATRE                              , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_BEACH                                , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_SNOW                                 , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_SUNSET                               , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO                          , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_FIREWORKS                            , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_SPORTS                               , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_PARTY                                , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT                          , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_BARCODE                              , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO                     , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_HDR                                  , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT              , // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_START                   = 100, // HIDL v3.2
+    ANDROID_CONTROL_SCENE_MODE_DEVICE_CUSTOM_END                     = 127, // HIDL v3.2
 } camera_metadata_enum_android_control_scene_mode_t;
 
 // ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
 typedef enum camera_metadata_enum_android_control_video_stabilization_mode {
-    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
-    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON,
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF                    , // HIDL v3.2
+    ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON                     , // HIDL v3.2
 } camera_metadata_enum_android_control_video_stabilization_mode_t;
 
 // ANDROID_CONTROL_AE_STATE
 typedef enum camera_metadata_enum_android_control_ae_state {
-    ANDROID_CONTROL_AE_STATE_INACTIVE,
-    ANDROID_CONTROL_AE_STATE_SEARCHING,
-    ANDROID_CONTROL_AE_STATE_CONVERGED,
-    ANDROID_CONTROL_AE_STATE_LOCKED,
-    ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED,
-    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
+    ANDROID_CONTROL_AE_STATE_INACTIVE                               , // HIDL v3.2
+    ANDROID_CONTROL_AE_STATE_SEARCHING                              , // HIDL v3.2
+    ANDROID_CONTROL_AE_STATE_CONVERGED                              , // HIDL v3.2
+    ANDROID_CONTROL_AE_STATE_LOCKED                                 , // HIDL v3.2
+    ANDROID_CONTROL_AE_STATE_FLASH_REQUIRED                         , // HIDL v3.2
+    ANDROID_CONTROL_AE_STATE_PRECAPTURE                             , // HIDL v3.2
 } camera_metadata_enum_android_control_ae_state_t;
 
 // ANDROID_CONTROL_AF_STATE
 typedef enum camera_metadata_enum_android_control_af_state {
-    ANDROID_CONTROL_AF_STATE_INACTIVE,
-    ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN,
-    ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED,
-    ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN,
-    ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED,
-    ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED,
-    ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED,
+    ANDROID_CONTROL_AF_STATE_INACTIVE                               , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN                           , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED                        , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN                            , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED                         , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED                     , // HIDL v3.2
+    ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED                      , // HIDL v3.2
 } camera_metadata_enum_android_control_af_state_t;
 
 // ANDROID_CONTROL_AWB_STATE
 typedef enum camera_metadata_enum_android_control_awb_state {
-    ANDROID_CONTROL_AWB_STATE_INACTIVE,
-    ANDROID_CONTROL_AWB_STATE_SEARCHING,
-    ANDROID_CONTROL_AWB_STATE_CONVERGED,
-    ANDROID_CONTROL_AWB_STATE_LOCKED,
+    ANDROID_CONTROL_AWB_STATE_INACTIVE                              , // HIDL v3.2
+    ANDROID_CONTROL_AWB_STATE_SEARCHING                             , // HIDL v3.2
+    ANDROID_CONTROL_AWB_STATE_CONVERGED                             , // HIDL v3.2
+    ANDROID_CONTROL_AWB_STATE_LOCKED                                , // HIDL v3.2
 } camera_metadata_enum_android_control_awb_state_t;
 
 // ANDROID_CONTROL_AE_LOCK_AVAILABLE
 typedef enum camera_metadata_enum_android_control_ae_lock_available {
-    ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE,
-    ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE,
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE                         , // HIDL v3.2
+    ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE                          , // HIDL v3.2
 } camera_metadata_enum_android_control_ae_lock_available_t;
 
 // ANDROID_CONTROL_AWB_LOCK_AVAILABLE
 typedef enum camera_metadata_enum_android_control_awb_lock_available {
-    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE,
-    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE,
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE                        , // HIDL v3.2
+    ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE                         , // HIDL v3.2
 } camera_metadata_enum_android_control_awb_lock_available_t;
 
 // ANDROID_CONTROL_ENABLE_ZSL
 typedef enum camera_metadata_enum_android_control_enable_zsl {
-    ANDROID_CONTROL_ENABLE_ZSL_FALSE,
-    ANDROID_CONTROL_ENABLE_ZSL_TRUE,
+    ANDROID_CONTROL_ENABLE_ZSL_FALSE                                , // HIDL v3.2
+    ANDROID_CONTROL_ENABLE_ZSL_TRUE                                 , // HIDL v3.2
 } camera_metadata_enum_android_control_enable_zsl_t;
 
+// ANDROID_CONTROL_AF_SCENE_CHANGE
+typedef enum camera_metadata_enum_android_control_af_scene_change {
+    ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED                    , // HIDL v3.3
+    ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED                        , // HIDL v3.3
+} camera_metadata_enum_android_control_af_scene_change_t;
+
 
 // ANDROID_DEMOSAIC_MODE
 typedef enum camera_metadata_enum_android_demosaic_mode {
-    ANDROID_DEMOSAIC_MODE_FAST,
-    ANDROID_DEMOSAIC_MODE_HIGH_QUALITY,
+    ANDROID_DEMOSAIC_MODE_FAST                                      , // HIDL v3.2
+    ANDROID_DEMOSAIC_MODE_HIGH_QUALITY                              , // HIDL v3.2
 } camera_metadata_enum_android_demosaic_mode_t;
 
 
 // ANDROID_EDGE_MODE
 typedef enum camera_metadata_enum_android_edge_mode {
-    ANDROID_EDGE_MODE_OFF,
-    ANDROID_EDGE_MODE_FAST,
-    ANDROID_EDGE_MODE_HIGH_QUALITY,
-    ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG,
+    ANDROID_EDGE_MODE_OFF                                           , // HIDL v3.2
+    ANDROID_EDGE_MODE_FAST                                          , // HIDL v3.2
+    ANDROID_EDGE_MODE_HIGH_QUALITY                                  , // HIDL v3.2
+    ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG                              , // HIDL v3.2
 } camera_metadata_enum_android_edge_mode_t;
 
 
 // ANDROID_FLASH_MODE
 typedef enum camera_metadata_enum_android_flash_mode {
-    ANDROID_FLASH_MODE_OFF,
-    ANDROID_FLASH_MODE_SINGLE,
-    ANDROID_FLASH_MODE_TORCH,
+    ANDROID_FLASH_MODE_OFF                                          , // HIDL v3.2
+    ANDROID_FLASH_MODE_SINGLE                                       , // HIDL v3.2
+    ANDROID_FLASH_MODE_TORCH                                        , // HIDL v3.2
 } camera_metadata_enum_android_flash_mode_t;
 
 // ANDROID_FLASH_STATE
 typedef enum camera_metadata_enum_android_flash_state {
-    ANDROID_FLASH_STATE_UNAVAILABLE,
-    ANDROID_FLASH_STATE_CHARGING,
-    ANDROID_FLASH_STATE_READY,
-    ANDROID_FLASH_STATE_FIRED,
-    ANDROID_FLASH_STATE_PARTIAL,
+    ANDROID_FLASH_STATE_UNAVAILABLE                                 , // HIDL v3.2
+    ANDROID_FLASH_STATE_CHARGING                                    , // HIDL v3.2
+    ANDROID_FLASH_STATE_READY                                       , // HIDL v3.2
+    ANDROID_FLASH_STATE_FIRED                                       , // HIDL v3.2
+    ANDROID_FLASH_STATE_PARTIAL                                     , // HIDL v3.2
 } camera_metadata_enum_android_flash_state_t;
 
 
 // ANDROID_FLASH_INFO_AVAILABLE
 typedef enum camera_metadata_enum_android_flash_info_available {
-    ANDROID_FLASH_INFO_AVAILABLE_FALSE,
-    ANDROID_FLASH_INFO_AVAILABLE_TRUE,
+    ANDROID_FLASH_INFO_AVAILABLE_FALSE                              , // HIDL v3.2
+    ANDROID_FLASH_INFO_AVAILABLE_TRUE                               , // HIDL v3.2
 } camera_metadata_enum_android_flash_info_available_t;
 
 
 // ANDROID_HOT_PIXEL_MODE
 typedef enum camera_metadata_enum_android_hot_pixel_mode {
-    ANDROID_HOT_PIXEL_MODE_OFF,
-    ANDROID_HOT_PIXEL_MODE_FAST,
-    ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY,
+    ANDROID_HOT_PIXEL_MODE_OFF                                      , // HIDL v3.2
+    ANDROID_HOT_PIXEL_MODE_FAST                                     , // HIDL v3.2
+    ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY                             , // HIDL v3.2
 } camera_metadata_enum_android_hot_pixel_mode_t;
 
 
 
 // ANDROID_LENS_OPTICAL_STABILIZATION_MODE
 typedef enum camera_metadata_enum_android_lens_optical_stabilization_mode {
-    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
-    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON,
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF                     , // HIDL v3.2
+    ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON                      , // HIDL v3.2
 } camera_metadata_enum_android_lens_optical_stabilization_mode_t;
 
 // ANDROID_LENS_FACING
 typedef enum camera_metadata_enum_android_lens_facing {
-    ANDROID_LENS_FACING_FRONT,
-    ANDROID_LENS_FACING_BACK,
-    ANDROID_LENS_FACING_EXTERNAL,
+    ANDROID_LENS_FACING_FRONT                                       , // HIDL v3.2
+    ANDROID_LENS_FACING_BACK                                        , // HIDL v3.2
+    ANDROID_LENS_FACING_EXTERNAL                                    , // HIDL v3.2
 } camera_metadata_enum_android_lens_facing_t;
 
 // ANDROID_LENS_STATE
 typedef enum camera_metadata_enum_android_lens_state {
-    ANDROID_LENS_STATE_STATIONARY,
-    ANDROID_LENS_STATE_MOVING,
+    ANDROID_LENS_STATE_STATIONARY                                   , // HIDL v3.2
+    ANDROID_LENS_STATE_MOVING                                       , // HIDL v3.2
 } camera_metadata_enum_android_lens_state_t;
 
+// ANDROID_LENS_POSE_REFERENCE
+typedef enum camera_metadata_enum_android_lens_pose_reference {
+    ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA                      , // HIDL v3.3
+    ANDROID_LENS_POSE_REFERENCE_GYROSCOPE                           , // HIDL v3.3
+} camera_metadata_enum_android_lens_pose_reference_t;
+
 
 // ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION
 typedef enum camera_metadata_enum_android_lens_info_focus_distance_calibration {
-    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
-    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
-    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED       , // HIDL v3.2
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE        , // HIDL v3.2
+    ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED         , // HIDL v3.2
 } camera_metadata_enum_android_lens_info_focus_distance_calibration_t;
 
 
 // ANDROID_NOISE_REDUCTION_MODE
 typedef enum camera_metadata_enum_android_noise_reduction_mode {
-    ANDROID_NOISE_REDUCTION_MODE_OFF,
-    ANDROID_NOISE_REDUCTION_MODE_FAST,
-    ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
-    ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
-    ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG,
+    ANDROID_NOISE_REDUCTION_MODE_OFF                                , // HIDL v3.2
+    ANDROID_NOISE_REDUCTION_MODE_FAST                               , // HIDL v3.2
+    ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY                       , // HIDL v3.2
+    ANDROID_NOISE_REDUCTION_MODE_MINIMAL                            , // HIDL v3.2
+    ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG                   , // HIDL v3.2
 } camera_metadata_enum_android_noise_reduction_mode_t;
 
 
 // ANDROID_QUIRKS_PARTIAL_RESULT
 typedef enum camera_metadata_enum_android_quirks_partial_result {
-    ANDROID_QUIRKS_PARTIAL_RESULT_FINAL,
-    ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL,
+    ANDROID_QUIRKS_PARTIAL_RESULT_FINAL                             , // HIDL v3.2
+    ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL                           , // HIDL v3.2
 } camera_metadata_enum_android_quirks_partial_result_t;
 
 
 // ANDROID_REQUEST_METADATA_MODE
 typedef enum camera_metadata_enum_android_request_metadata_mode {
-    ANDROID_REQUEST_METADATA_MODE_NONE,
-    ANDROID_REQUEST_METADATA_MODE_FULL,
+    ANDROID_REQUEST_METADATA_MODE_NONE                              , // HIDL v3.2
+    ANDROID_REQUEST_METADATA_MODE_FULL                              , // HIDL v3.2
 } camera_metadata_enum_android_request_metadata_mode_t;
 
 // ANDROID_REQUEST_TYPE
 typedef enum camera_metadata_enum_android_request_type {
-    ANDROID_REQUEST_TYPE_CAPTURE,
-    ANDROID_REQUEST_TYPE_REPROCESS,
+    ANDROID_REQUEST_TYPE_CAPTURE                                    , // HIDL v3.2
+    ANDROID_REQUEST_TYPE_REPROCESS                                  , // HIDL v3.2
 } camera_metadata_enum_android_request_type_t;
 
 // ANDROID_REQUEST_AVAILABLE_CAPABILITIES
 typedef enum camera_metadata_enum_android_request_available_capabilities {
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT,
-    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO,
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE      , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR            , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING   , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW                      , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING     , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS     , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE            , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING         , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT             , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+                                                                     , // HIDL v3.2
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING          , // HIDL v3.3
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA     , // HIDL v3.3
+    ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME               , // HIDL v3.3
 } camera_metadata_enum_android_request_available_capabilities_t;
 
 
 // ANDROID_SCALER_AVAILABLE_FORMATS
 typedef enum camera_metadata_enum_android_scaler_available_formats {
-    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16                      = 0x20,
-    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE                 = 0x24,
-    ANDROID_SCALER_AVAILABLE_FORMATS_YV12                       = 0x32315659,
-    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP               = 0x11,
-    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED     = 0x22,
-    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888              = 0x23,
-    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                       = 0x21,
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW16                           = 0x20, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE                      = 0x24, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_YV12                            = 0x32315659, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCrCb_420_SP                    = 0x11, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED          = 0x22, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888                   = 0x23, // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_FORMATS_BLOB                            = 0x21, // HIDL v3.2
 } camera_metadata_enum_android_scaler_available_formats_t;
 
 // ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
 typedef enum camera_metadata_enum_android_scaler_available_stream_configurations {
-    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT           , // HIDL v3.2
+    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT            , // HIDL v3.2
 } camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
 // ANDROID_SCALER_CROPPING_TYPE
 typedef enum camera_metadata_enum_android_scaler_cropping_type {
-    ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY,
-    ANDROID_SCALER_CROPPING_TYPE_FREEFORM,
+    ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY                        , // HIDL v3.2
+    ANDROID_SCALER_CROPPING_TYPE_FREEFORM                           , // HIDL v3.2
 } camera_metadata_enum_android_scaler_cropping_type_t;
 
 
 // ANDROID_SENSOR_REFERENCE_ILLUMINANT1
 typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 {
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT               = 1,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT            = 2,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN               = 3,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                  = 4,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER           = 9,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER         = 10,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                  = 11,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT   = 12,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT  = 13,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT      = 15,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A             = 17,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B             = 18,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C             = 19,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                    = 20,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                    = 21,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                    = 22,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                    = 23,
-    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN    = 24,
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT                    = 1, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT                 = 2, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN                    = 3, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLASH                       = 4, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER                = 9, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER              = 10, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE                       = 11, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT        = 12, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT       = 13, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT      = 14, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT           = 15, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A                  = 17, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B                  = 18, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C                  = 19, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55                         = 20, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65                         = 21, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75                         = 22, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50                         = 23, // HIDL v3.2
+    ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN         = 24, // HIDL v3.2
 } camera_metadata_enum_android_sensor_reference_illuminant1_t;
 
 // ANDROID_SENSOR_TEST_PATTERN_MODE
 typedef enum camera_metadata_enum_android_sensor_test_pattern_mode {
-    ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
-    ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,
-    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,
-    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,
-    ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,
-    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1                    = 256,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_OFF                            , // HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR                    , // HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS                     , // HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY        , // HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE_PN9                            , // HIDL v3.2
+    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1                         = 256, // HIDL v3.2
 } camera_metadata_enum_android_sensor_test_pattern_mode_t;
 
 
 // ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum camera_metadata_enum_android_sensor_info_color_filter_arrangement {
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB,
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG,
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG,
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR,
-    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB,
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB               , // HIDL v3.2
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG               , // HIDL v3.2
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG               , // HIDL v3.2
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR               , // HIDL v3.2
+    ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB                , // HIDL v3.2
 } camera_metadata_enum_android_sensor_info_color_filter_arrangement_t;
 
 // ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE
 typedef enum camera_metadata_enum_android_sensor_info_timestamp_source {
-    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN,
-    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME,
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN                    , // HIDL v3.2
+    ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME                   , // HIDL v3.2
 } camera_metadata_enum_android_sensor_info_timestamp_source_t;
 
 // ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED
 typedef enum camera_metadata_enum_android_sensor_info_lens_shading_applied {
-    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE,
-    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE,
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_FALSE                  , // HIDL v3.2
+    ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE                   , // HIDL v3.2
 } camera_metadata_enum_android_sensor_info_lens_shading_applied_t;
 
 
 // ANDROID_SHADING_MODE
 typedef enum camera_metadata_enum_android_shading_mode {
-    ANDROID_SHADING_MODE_OFF,
-    ANDROID_SHADING_MODE_FAST,
-    ANDROID_SHADING_MODE_HIGH_QUALITY,
+    ANDROID_SHADING_MODE_OFF                                        , // HIDL v3.2
+    ANDROID_SHADING_MODE_FAST                                       , // HIDL v3.2
+    ANDROID_SHADING_MODE_HIGH_QUALITY                               , // HIDL v3.2
 } camera_metadata_enum_android_shading_mode_t;
 
 
 // ANDROID_STATISTICS_FACE_DETECT_MODE
 typedef enum camera_metadata_enum_android_statistics_face_detect_mode {
-    ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
-    ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
-    ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,
+    ANDROID_STATISTICS_FACE_DETECT_MODE_OFF                         , // HIDL v3.2
+    ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE                      , // HIDL v3.2
+    ANDROID_STATISTICS_FACE_DETECT_MODE_FULL                        , // HIDL v3.2
 } camera_metadata_enum_android_statistics_face_detect_mode_t;
 
 // ANDROID_STATISTICS_HISTOGRAM_MODE
 typedef enum camera_metadata_enum_android_statistics_histogram_mode {
-    ANDROID_STATISTICS_HISTOGRAM_MODE_OFF,
-    ANDROID_STATISTICS_HISTOGRAM_MODE_ON,
+    ANDROID_STATISTICS_HISTOGRAM_MODE_OFF                           , // HIDL v3.2
+    ANDROID_STATISTICS_HISTOGRAM_MODE_ON                            , // HIDL v3.2
 } camera_metadata_enum_android_statistics_histogram_mode_t;
 
 // ANDROID_STATISTICS_SHARPNESS_MAP_MODE
 typedef enum camera_metadata_enum_android_statistics_sharpness_map_mode {
-    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF,
-    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON,
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF                       , // HIDL v3.2
+    ANDROID_STATISTICS_SHARPNESS_MAP_MODE_ON                        , // HIDL v3.2
 } camera_metadata_enum_android_statistics_sharpness_map_mode_t;
 
 // ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE
 typedef enum camera_metadata_enum_android_statistics_hot_pixel_map_mode {
-    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF,
-    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON,
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF                       , // HIDL v3.2
+    ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_ON                        , // HIDL v3.2
 } camera_metadata_enum_android_statistics_hot_pixel_map_mode_t;
 
 // ANDROID_STATISTICS_SCENE_FLICKER
 typedef enum camera_metadata_enum_android_statistics_scene_flicker {
-    ANDROID_STATISTICS_SCENE_FLICKER_NONE,
-    ANDROID_STATISTICS_SCENE_FLICKER_50HZ,
-    ANDROID_STATISTICS_SCENE_FLICKER_60HZ,
+    ANDROID_STATISTICS_SCENE_FLICKER_NONE                           , // HIDL v3.2
+    ANDROID_STATISTICS_SCENE_FLICKER_50HZ                           , // HIDL v3.2
+    ANDROID_STATISTICS_SCENE_FLICKER_60HZ                           , // HIDL v3.2
 } camera_metadata_enum_android_statistics_scene_flicker_t;
 
 // ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
 typedef enum camera_metadata_enum_android_statistics_lens_shading_map_mode {
-    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
-    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON,
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF                    , // HIDL v3.2
+    ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON                     , // HIDL v3.2
 } camera_metadata_enum_android_statistics_lens_shading_map_mode_t;
 
+// ANDROID_STATISTICS_OIS_DATA_MODE
+typedef enum camera_metadata_enum_android_statistics_ois_data_mode {
+    ANDROID_STATISTICS_OIS_DATA_MODE_OFF                            , // HIDL v3.3
+    ANDROID_STATISTICS_OIS_DATA_MODE_ON                             , // HIDL v3.3
+} camera_metadata_enum_android_statistics_ois_data_mode_t;
+
 
 
 // ANDROID_TONEMAP_MODE
 typedef enum camera_metadata_enum_android_tonemap_mode {
-    ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
-    ANDROID_TONEMAP_MODE_FAST,
-    ANDROID_TONEMAP_MODE_HIGH_QUALITY,
-    ANDROID_TONEMAP_MODE_GAMMA_VALUE,
-    ANDROID_TONEMAP_MODE_PRESET_CURVE,
+    ANDROID_TONEMAP_MODE_CONTRAST_CURVE                             , // HIDL v3.2
+    ANDROID_TONEMAP_MODE_FAST                                       , // HIDL v3.2
+    ANDROID_TONEMAP_MODE_HIGH_QUALITY                               , // HIDL v3.2
+    ANDROID_TONEMAP_MODE_GAMMA_VALUE                                , // HIDL v3.2
+    ANDROID_TONEMAP_MODE_PRESET_CURVE                               , // HIDL v3.2
 } camera_metadata_enum_android_tonemap_mode_t;
 
 // ANDROID_TONEMAP_PRESET_CURVE
 typedef enum camera_metadata_enum_android_tonemap_preset_curve {
-    ANDROID_TONEMAP_PRESET_CURVE_SRGB,
-    ANDROID_TONEMAP_PRESET_CURVE_REC709,
+    ANDROID_TONEMAP_PRESET_CURVE_SRGB                               , // HIDL v3.2
+    ANDROID_TONEMAP_PRESET_CURVE_REC709                             , // HIDL v3.2
 } camera_metadata_enum_android_tonemap_preset_curve_t;
 
 
 // ANDROID_LED_TRANSMIT
 typedef enum camera_metadata_enum_android_led_transmit {
-    ANDROID_LED_TRANSMIT_OFF,
-    ANDROID_LED_TRANSMIT_ON,
+    ANDROID_LED_TRANSMIT_OFF                                        , // HIDL v3.2
+    ANDROID_LED_TRANSMIT_ON                                         , // HIDL v3.2
 } camera_metadata_enum_android_led_transmit_t;
 
 // ANDROID_LED_AVAILABLE_LEDS
 typedef enum camera_metadata_enum_android_led_available_leds {
-    ANDROID_LED_AVAILABLE_LEDS_TRANSMIT,
+    ANDROID_LED_AVAILABLE_LEDS_TRANSMIT                             , // HIDL v3.2
 } camera_metadata_enum_android_led_available_leds_t;
 
 
 // ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL
 typedef enum camera_metadata_enum_android_info_supported_hardware_level {
-    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
-    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
-    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
-    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3,
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED                   , // HIDL v3.2
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL                      , // HIDL v3.2
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY                    , // HIDL v3.2
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3                         , // HIDL v3.2
+    ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL                  , // HIDL v3.3
 } camera_metadata_enum_android_info_supported_hardware_level_t;
 
 
 // ANDROID_BLACK_LEVEL_LOCK
 typedef enum camera_metadata_enum_android_black_level_lock {
-    ANDROID_BLACK_LEVEL_LOCK_OFF,
-    ANDROID_BLACK_LEVEL_LOCK_ON,
+    ANDROID_BLACK_LEVEL_LOCK_OFF                                    , // HIDL v3.2
+    ANDROID_BLACK_LEVEL_LOCK_ON                                     , // HIDL v3.2
 } camera_metadata_enum_android_black_level_lock_t;
 
 
 // ANDROID_SYNC_FRAME_NUMBER
 typedef enum camera_metadata_enum_android_sync_frame_number {
-    ANDROID_SYNC_FRAME_NUMBER_CONVERGING                        = -1,
-    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN                           = -2,
+    ANDROID_SYNC_FRAME_NUMBER_CONVERGING                             = -1, // HIDL v3.2
+    ANDROID_SYNC_FRAME_NUMBER_UNKNOWN                                = -2, // HIDL v3.2
 } camera_metadata_enum_android_sync_frame_number_t;
 
 // ANDROID_SYNC_MAX_LATENCY
 typedef enum camera_metadata_enum_android_sync_max_latency {
-    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL                  = 0,
-    ANDROID_SYNC_MAX_LATENCY_UNKNOWN                            = -1,
+    ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL                       = 0, // HIDL v3.2
+    ANDROID_SYNC_MAX_LATENCY_UNKNOWN                                 = -1, // HIDL v3.2
 } camera_metadata_enum_android_sync_max_latency_t;
 
 
 
 // ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS
 typedef enum camera_metadata_enum_android_depth_available_depth_stream_configurations {
-    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
-    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT,
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT      , // HIDL v3.2
+    ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT       , // HIDL v3.2
 } camera_metadata_enum_android_depth_available_depth_stream_configurations_t;
 
 // ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
 typedef enum camera_metadata_enum_android_depth_depth_is_exclusive {
-    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE,
-    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE,
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE                          , // HIDL v3.2
+    ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_TRUE                           , // HIDL v3.2
 } camera_metadata_enum_android_depth_depth_is_exclusive_t;
 
 
+// ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE
+typedef enum camera_metadata_enum_android_logical_multi_camera_sensor_sync_type {
+    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE       , // HIDL v3.3
+    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED        , // HIDL v3.3
+} camera_metadata_enum_android_logical_multi_camera_sensor_sync_type_t;
+
+
+// ANDROID_DISTORTION_CORRECTION_MODE
+typedef enum camera_metadata_enum_android_distortion_correction_mode {
+    ANDROID_DISTORTION_CORRECTION_MODE_OFF                          , // HIDL v3.3
+    ANDROID_DISTORTION_CORRECTION_MODE_FAST                         , // HIDL v3.3
+    ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY                 , // HIDL v3.3
+} camera_metadata_enum_android_distortion_correction_mode_t;
+
+
index 23af44b..75ad1f4 100644 (file)
@@ -57,6 +57,9 @@ const char *camera_metadata_section_names[ANDROID_SECTION_COUNT] = {
     [ANDROID_SYNC]                 = "android.sync",
     [ANDROID_REPROCESS]            = "android.reprocess",
     [ANDROID_DEPTH]                = "android.depth",
+    [ANDROID_LOGICAL_MULTI_CAMERA] = "android.logicalMultiCamera",
+    [ANDROID_DISTORTION_CORRECTION]
+                                    = "android.distortionCorrection",
 };
 
 unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = {
@@ -112,6 +115,11 @@ unsigned int camera_metadata_section_bounds[ANDROID_SECTION_COUNT][2] = {
                                        ANDROID_REPROCESS_END },
     [ANDROID_DEPTH]                = { ANDROID_DEPTH_START,
                                        ANDROID_DEPTH_END },
+    [ANDROID_LOGICAL_MULTI_CAMERA] = { ANDROID_LOGICAL_MULTI_CAMERA_START,
+                                       ANDROID_LOGICAL_MULTI_CAMERA_END },
+    [ANDROID_DISTORTION_CORRECTION]
+                                    = { ANDROID_DISTORTION_CORRECTION_START,
+                                       ANDROID_DISTORTION_CORRECTION_END },
 };
 
 static tag_info_t android_color_correction[ANDROID_COLOR_CORRECTION_END -
@@ -218,6 +226,8 @@ static tag_info_t android_control[ANDROID_CONTROL_END -
     { "postRawSensitivityBoost",       TYPE_INT32  },
     [ ANDROID_CONTROL_ENABLE_ZSL - ANDROID_CONTROL_START ] =
     { "enableZsl",                     TYPE_BYTE   },
+    [ ANDROID_CONTROL_AF_SCENE_CHANGE - ANDROID_CONTROL_START ] =
+    { "afSceneChange",                 TYPE_BYTE   },
 };
 
 static tag_info_t android_demosaic[ANDROID_DEMOSAIC_END -
@@ -318,6 +328,10 @@ static tag_info_t android_lens[ANDROID_LENS_END -
     { "intrinsicCalibration",          TYPE_FLOAT  },
     [ ANDROID_LENS_RADIAL_DISTORTION - ANDROID_LENS_START ] =
     { "radialDistortion",              TYPE_FLOAT  },
+    [ ANDROID_LENS_POSE_REFERENCE - ANDROID_LENS_START ] =
+    { "poseReference",                 TYPE_BYTE   },
+    [ ANDROID_LENS_DISTORTION - ANDROID_LENS_START ] =
+    { "distortion",                    TYPE_FLOAT  },
 };
 
 static tag_info_t android_lens_info[ANDROID_LENS_INFO_END -
@@ -398,6 +412,11 @@ static tag_info_t android_request[ANDROID_REQUEST_END -
     { "availableResultKeys",           TYPE_INT32  },
     [ ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS - ANDROID_REQUEST_START ] =
     { "availableCharacteristicsKeys",  TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_SESSION_KEYS - ANDROID_REQUEST_START ] =
+    { "availableSessionKeys",          TYPE_INT32  },
+    [ ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS - ANDROID_REQUEST_START ] =
+    { "availablePhysicalCameraRequestKeys",
+                                        TYPE_INT32  },
 };
 
 static tag_info_t android_scaler[ANDROID_SCALER_END -
@@ -581,6 +600,14 @@ static tag_info_t android_statistics[ANDROID_STATISTICS_END -
     { "hotPixelMap",                   TYPE_INT32  },
     [ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE - ANDROID_STATISTICS_START ] =
     { "lensShadingMapMode",            TYPE_BYTE   },
+    [ ANDROID_STATISTICS_OIS_DATA_MODE - ANDROID_STATISTICS_START ] =
+    { "oisDataMode",                   TYPE_BYTE   },
+    [ ANDROID_STATISTICS_OIS_TIMESTAMPS - ANDROID_STATISTICS_START ] =
+    { "oisTimestamps",                 TYPE_INT64  },
+    [ ANDROID_STATISTICS_OIS_X_SHIFTS - ANDROID_STATISTICS_START ] =
+    { "oisXShifts",                    TYPE_FLOAT  },
+    [ ANDROID_STATISTICS_OIS_Y_SHIFTS - ANDROID_STATISTICS_START ] =
+    { "oisYShifts",                    TYPE_FLOAT  },
 };
 
 static tag_info_t android_statistics_info[ANDROID_STATISTICS_INFO_END -
@@ -601,6 +628,8 @@ static tag_info_t android_statistics_info[ANDROID_STATISTICS_INFO_END -
     { "availableHotPixelMapModes",     TYPE_BYTE   },
     [ ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES - ANDROID_STATISTICS_INFO_START ] =
     { "availableLensShadingMapModes",  TYPE_BYTE   },
+    [ ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES - ANDROID_STATISTICS_INFO_START ] =
+    { "availableOisDataModes",         TYPE_BYTE   },
 };
 
 static tag_info_t android_tonemap[ANDROID_TONEMAP_END -
@@ -635,6 +664,8 @@ static tag_info_t android_info[ANDROID_INFO_END -
         ANDROID_INFO_START] = {
     [ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL - ANDROID_INFO_START ] =
     { "supportedHardwareLevel",        TYPE_BYTE   },
+    [ ANDROID_INFO_VERSION - ANDROID_INFO_START ] =
+    { "version",                       TYPE_BYTE   },
 };
 
 static tag_info_t android_black_level[ANDROID_BLACK_LEVEL_END -
@@ -675,6 +706,22 @@ static tag_info_t android_depth[ANDROID_DEPTH_END -
     { "depthIsExclusive",              TYPE_BYTE   },
 };
 
+static tag_info_t android_logical_multi_camera[ANDROID_LOGICAL_MULTI_CAMERA_END -
+        ANDROID_LOGICAL_MULTI_CAMERA_START] = {
+    [ ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS - ANDROID_LOGICAL_MULTI_CAMERA_START ] =
+    { "physicalIds",                   TYPE_BYTE   },
+    [ ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE - ANDROID_LOGICAL_MULTI_CAMERA_START ] =
+    { "sensorSyncType",                TYPE_BYTE   },
+};
+
+static tag_info_t android_distortion_correction[ANDROID_DISTORTION_CORRECTION_END -
+        ANDROID_DISTORTION_CORRECTION_START] = {
+    [ ANDROID_DISTORTION_CORRECTION_MODE - ANDROID_DISTORTION_CORRECTION_START ] =
+    { "mode",                          TYPE_BYTE   },
+    [ ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES - ANDROID_DISTORTION_CORRECTION_START ] =
+    { "availableModes",                TYPE_BYTE   },
+};
+
 
 tag_info_t *tag_info[ANDROID_SECTION_COUNT] = {
     android_color_correction,
@@ -703,6 +750,8 @@ tag_info_t *tag_info[ANDROID_SECTION_COUNT] = {
     android_sync,
     android_reprocess,
     android_depth,
+    android_logical_multi_camera,
+    android_distortion_correction,
 };
 
 int camera_metadata_enum_snprint(uint32_t tag,
@@ -824,6 +873,10 @@ int camera_metadata_enum_snprint(uint32_t tag,
                     msg = "ON_AUTO_FLASH_REDEYE";
                     ret = 0;
                     break;
+                case ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH:
+                    msg = "ON_EXTERNAL_FLASH";
+                    ret = 0;
+                    break;
                 default:
                     msg = "error: enum value out of range";
             }
@@ -998,6 +1051,10 @@ int camera_metadata_enum_snprint(uint32_t tag,
                     msg = "MANUAL";
                     ret = 0;
                     break;
+                case ANDROID_CONTROL_CAPTURE_INTENT_MOTION_TRACKING:
+                    msg = "MOTION_TRACKING";
+                    ret = 0;
+                    break;
                 default:
                     msg = "error: enum value out of range";
             }
@@ -1367,6 +1424,21 @@ int camera_metadata_enum_snprint(uint32_t tag,
             }
             break;
         }
+        case ANDROID_CONTROL_AF_SCENE_CHANGE: {
+            switch (value) {
+                case ANDROID_CONTROL_AF_SCENE_CHANGE_NOT_DETECTED:
+                    msg = "NOT_DETECTED";
+                    ret = 0;
+                    break;
+                case ANDROID_CONTROL_AF_SCENE_CHANGE_DETECTED:
+                    msg = "DETECTED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
 
         case ANDROID_DEMOSAIC_MODE: {
             switch (value) {
@@ -1622,6 +1694,24 @@ int camera_metadata_enum_snprint(uint32_t tag,
         case ANDROID_LENS_RADIAL_DISTORTION: {
             break;
         }
+        case ANDROID_LENS_POSE_REFERENCE: {
+            switch (value) {
+                case ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA:
+                    msg = "PRIMARY_CAMERA";
+                    ret = 0;
+                    break;
+                case ANDROID_LENS_POSE_REFERENCE_GYROSCOPE:
+                    msg = "GYROSCOPE";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_LENS_DISTORTION: {
+            break;
+        }
 
         case ANDROID_LENS_INFO_AVAILABLE_APERTURES: {
             break;
@@ -1828,6 +1918,18 @@ int camera_metadata_enum_snprint(uint32_t tag,
                     msg = "CONSTRAINED_HIGH_SPEED_VIDEO";
                     ret = 0;
                     break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING:
+                    msg = "MOTION_TRACKING";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA:
+                    msg = "LOGICAL_MULTI_CAMERA";
+                    ret = 0;
+                    break;
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME:
+                    msg = "MONOCHROME";
+                    ret = 0;
+                    break;
                 default:
                     msg = "error: enum value out of range";
             }
@@ -1842,6 +1944,12 @@ int camera_metadata_enum_snprint(uint32_t tag,
         case ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS: {
             break;
         }
+        case ANDROID_REQUEST_AVAILABLE_SESSION_KEYS: {
+            break;
+        }
+        case ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS: {
+            break;
+        }
 
         case ANDROID_SCALER_CROP_REGION: {
             break;
@@ -2383,6 +2491,30 @@ int camera_metadata_enum_snprint(uint32_t tag,
             }
             break;
         }
+        case ANDROID_STATISTICS_OIS_DATA_MODE: {
+            switch (value) {
+                case ANDROID_STATISTICS_OIS_DATA_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_STATISTICS_OIS_DATA_MODE_ON:
+                    msg = "ON";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_STATISTICS_OIS_TIMESTAMPS: {
+            break;
+        }
+        case ANDROID_STATISTICS_OIS_X_SHIFTS: {
+            break;
+        }
+        case ANDROID_STATISTICS_OIS_Y_SHIFTS: {
+            break;
+        }
 
         case ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES: {
             break;
@@ -2408,6 +2540,9 @@ int camera_metadata_enum_snprint(uint32_t tag,
         case ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES: {
             break;
         }
+        case ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES: {
+            break;
+        }
 
         case ANDROID_TONEMAP_CURVE_BLUE: {
             break;
@@ -2515,11 +2650,18 @@ int camera_metadata_enum_snprint(uint32_t tag,
                     msg = "3";
                     ret = 0;
                     break;
+                case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL:
+                    msg = "EXTERNAL";
+                    ret = 0;
+                    break;
                 default:
                     msg = "error: enum value out of range";
             }
             break;
         }
+        case ANDROID_INFO_VERSION: {
+            break;
+        }
 
         case ANDROID_BLACK_LEVEL_LOCK: {
             switch (value) {
@@ -2615,6 +2757,48 @@ int camera_metadata_enum_snprint(uint32_t tag,
             break;
         }
 
+        case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
+            break;
+        }
+        case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE: {
+            switch (value) {
+                case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE:
+                    msg = "APPROXIMATE";
+                    ret = 0;
+                    break;
+                case ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED:
+                    msg = "CALIBRATED";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+
+        case ANDROID_DISTORTION_CORRECTION_MODE: {
+            switch (value) {
+                case ANDROID_DISTORTION_CORRECTION_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_DISTORTION_CORRECTION_MODE_FAST:
+                    msg = "FAST";
+                    ret = 0;
+                    break;
+                case ANDROID_DISTORTION_CORRECTION_MODE_HIGH_QUALITY:
+                    msg = "HIGH_QUALITY";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES: {
+            break;
+        }
+
     }
 
     strncpy(dst, msg, size - 1);