From a46e02f132cca70a376e384a4e4dda725e9f384d Mon Sep 17 00:00:00 2001
From: Igor Murashkin
Date: Thu, 9 Jan 2014 17:43:38 -0800
Subject: [PATCH] camera_metadata: Add test patterns and pipeline depth
 information

Bug: 9625427
Change-Id: I11ff3a74f3a115ac9b699ce516400ed323c274f9
---
 camera/docs/docs.html                        | 516 +++++++++++++++++++++++++++
 camera/docs/metadata_properties.xml          | 210 +++++++++++
 camera/include/system/camera_metadata_tags.h |  15 +
 camera/src/camera_metadata_tag_info.c        |  55 ++-
 4 files changed, 795 insertions(+), 1 deletion(-)

diff --git a/camera/docs/docs.html b/camera/docs/docs.html
index c355ff94..5faee3c1 100644
--- a/camera/docs/docs.html
+++ b/camera/docs/docs.html
@@ -423,6 +423,7 @@
@@ -432,6 +433,7 @@
  • android.request.id
  • android.request.metadataMode
  • android.request.outputStreams
+ • android.request.pipelineDepth
@@ -475,6 +477,8 @@
  • android.sensor.exposureTime
  • android.sensor.frameDuration
  • android.sensor.sensitivity
+ • android.sensor.testPatternData
+ • android.sensor.testPatternMode
@@ -503,6 +507,7 @@
  • android.sensor.orientation
  • android.sensor.referenceIlluminant1
  • android.sensor.referenceIlluminant2
+ • android.sensor.availableTestPatternModes
@@ -513,6 +518,7 @@
  • android.sensor.sensitivity
  • android.sensor.timestamp
  • android.sensor.temperature
+ • android.sensor.testPatternMode
@@ -9093,6 +9099,66 @@ can be allocated at the same time

android.request.pipelineMaxDepth (byte) [public]

Specifies the maximum number of pipeline stages a frame has to go through,
from when it is exposed to when it is available to the framework.

Details

A typical minimum value for this is 2 (one stage to expose, one stage to read
out) from the sensor. The ISP then usually adds its own stages for custom HW
processing. Further stages may be added by SW processing.

Depending on what settings are used (e.g. YUV, JPEG) and what processing is
enabled (e.g. face detection), the actual pipeline depth (specified by
android.request.pipelineDepth) may be less than the max pipeline depth.

A pipeline depth of X stages is equivalent to a pipeline latency of X frame
intervals.

This value will be 8 or less.

HAL Implementation Details

This value should be 4 or less.

@@ -9309,6 +9375,61 @@ and the lens and flash must operate as requested.

android.request.pipelineDepth (byte) [public]

Specifies the number of pipeline stages the frame went through, from when it
was exposed to when the final completed result was available to the framework.

Range: <= android.request.pipelineMaxDepth

Details

Depending on what settings are used in the request, and what streams are
configured, the data may undergo less processing, and some pipeline stages
may be skipped.

See android.request.pipelineMaxDepth for more details.

HAL Implementation Details

This value must always represent the accurate count of how many pipeline
stages were actually used.
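As an illustration of how these two tags relate, here is a small consumer-side
sketch (an illustration only, not part of this patch); find_camera_metadata_ro_entry
and camera_metadata_ro_entry_t are the existing camera_metadata C API, while
check_pipeline_depth is a hypothetical helper:

    #include <assert.h>
    #include <system/camera_metadata.h>

    /* Hypothetical helper: compare the advertised maximum pipeline depth
     * (static metadata) with the depth reported in one capture result. */
    static void check_pipeline_depth(const camera_metadata_t *static_info,
                                     const camera_metadata_t *result)
    {
        camera_metadata_ro_entry_t max_depth, depth;

        if (find_camera_metadata_ro_entry(static_info,
                    ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &max_depth) == 0 &&
            find_camera_metadata_ro_entry(result,
                    ANDROID_REQUEST_PIPELINE_DEPTH, &depth) == 0) {
            /* A depth of X stages means a latency of X frame intervals, and
             * the per-frame value must never exceed the advertised maximum. */
            assert(depth.data.u8[0] <= max_depth.data.u8[0]);
            assert(max_depth.data.u8[0] <= 8);
        }
    }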

@@ -10284,6 +10405,217 @@ gain to the nearest possible value (rather than gain more).

android.sensor.testPatternData (int32 x 4) [public]

A pixel [R, G_even, G_odd, B] that supplies the test pattern when
android.sensor.testPatternMode is SOLID_COLOR.

Range: Optional. Must be supported if android.sensor.availableTestPatternModes
contains SOLID_COLOR.

Details

Each color channel is treated as an unsigned 32-bit integer. The camera device
then uses the most significant bits of each channel, taking as many bits as
its raw Bayer sensor output provides.

For example, a sensor with RAW10 Bayer output would use the 10 most
significant bits from each color channel.
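As a concrete sketch of that truncation rule (the helper name and the raw_bits
parameter are assumptions for illustration, not part of this change):

    #include <stdint.h>

    /* Keep the most significant raw_bits of one 32-bit testPatternData
     * channel, e.g. raw_bits = 10 for a RAW10 Bayer sensor. */
    static inline uint32_t test_pattern_channel(uint32_t channel32,
                                                unsigned raw_bits)
    {
        return channel32 >> (32u - raw_bits);
    }

    /* With raw_bits = 10:
     *   0xFFFFFFFF -> 0x3FF  (full scale, 100% intensity)
     *   0x80000000 -> 0x200  (mid scale)
     *   0x00000000 -> 0x000  (black)                       */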

HAL Implementation Details

android.sensor.testPatternMode (int32) [public]

When enabled, the sensor sends a test pattern instead of performing a real
exposure from the camera.

Range: Optional. Defaults to OFF. Value must be one of
android.sensor.availableTestPatternModes.

Details

When a test pattern is enabled, all manual sensor controls specified by
android.sensor.* should be ignored. All other controls should work as normal.

For example, if manual flash is enabled, flash firing should still occur (and
the test pattern should remain unmodified, since the flash would not actually
affect it).

HAL Implementation Details

All test patterns are specified in the Bayer domain.

The HAL may choose to substitute test patterns from the sensor with test
patterns from on-device memory. In that case, it should be indistinguishable
to the ISP whether the data came from the sensor interconnect bus (such as
CSI2) or memory.
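One hedged sketch of how a HAL might synthesize the SOLID_COLOR pattern from
on-device memory; the assumed buffer layout (unpacked 10-bit samples in 16-bit
containers, RGGB mosaic) and the function name are illustrative, not
requirements of this change:

    #include <stddef.h>
    #include <stdint.h>

    /* Fill an RGGB Bayer buffer (unpacked 10-bit samples in 16-bit containers)
     * with the SOLID_COLOR pattern from testPatternData = [R, Geven, Godd, B]. */
    static void fill_solid_color_rggb10(uint16_t *raw, size_t width,
                                        size_t height, const uint32_t data[4])
    {
        for (size_t y = 0; y < height; y++) {
            for (size_t x = 0; x < width; x++) {
                /* Even rows alternate R, G_even; odd rows alternate G_odd, B. */
                int ch = (y & 1) ? ((x & 1) ? 3 : 2)
                                 : ((x & 1) ? 1 : 0);
                raw[y * width + x] = (uint16_t)(data[ch] >> 22); /* top 10 bits */
            }
        }
    }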

@@ -11355,6 +11687,40 @@ supported? Need CCT for each!

android.sensor.availableTestPatternModes (byte) [public]

Optional. Defaults to [OFF]. Lists the test pattern modes supported for
android.sensor.testPatternMode.

Range: Must include OFF. All custom modes must be >= CUSTOM1.
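A HAL-side sketch of how that range could be enforced when a request arrives;
test_pattern_mode_supported is a hypothetical helper, while the tag and enum
names are the ones added by this patch:

    #include <stdbool.h>
    #include <stddef.h>
    #include <system/camera_metadata.h>

    /* Return true if requested_mode appears in availableTestPatternModes.
     * If the static entry is absent, only OFF is guaranteed to be supported. */
    static bool test_pattern_mode_supported(const camera_metadata_t *static_info,
                                            int32_t requested_mode)
    {
        camera_metadata_ro_entry_t avail;

        if (find_camera_metadata_ro_entry(static_info,
                    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &avail) != 0)
            return requested_mode == ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;

        for (size_t i = 0; i < avail.count; i++) {
            if ((int32_t)avail.data.u8[i] == requested_mode)
                return true;
        }
        return false;
    }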

@@ -11697,6 +12063,156 @@ somewhere close to it.

android.sensor.testPatternMode (int32) [public]

When enabled, the sensor sends a test pattern instead of performing a real
exposure from the camera.

Range: Optional. Defaults to OFF. Value must be one of
android.sensor.availableTestPatternModes.

Details

When a test pattern is enabled, all manual sensor controls specified by
android.sensor.* should be ignored. All other controls should work as normal.

For example, if manual flash is enabled, flash firing should still occur (and
the test pattern should remain unmodified, since the flash would not actually
affect it).

HAL Implementation Details

All test patterns are specified in the Bayer domain.

The HAL may choose to substitute test patterns from the sensor with test
patterns from on-device memory. In that case, it should be indistinguishable
to the ISP whether the data came from the sensor interconnect bus (such as
CSI2) or memory.

diff --git a/camera/docs/metadata_properties.xml b/camera/docs/metadata_properties.xml
index 8901d342..f35122c5 100644
--- a/camera/docs/metadata_properties.xml
+++ b/camera/docs/metadata_properties.xml
@@ -2453,7 +2453,48 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata kind="controls">

android.request.pipelineDepth (byte)

Specifies the number of pipeline stages the frame went through, from when it
was exposed to when the final completed result was available to the framework.

Range: <= android.request.pipelineMaxDepth
Depending on what settings are used in the request, and what streams are
configured, the data may undergo less processing, and some pipeline stages
may be skipped.

See android.request.pipelineMaxDepth for more details.

HAL details: This value must always represent the accurate count of how many
pipeline stages were actually used.

android.request.pipelineMaxDepth (byte)

Specifies the maximum number of pipeline stages a frame has to go through,
from when it is exposed to when it is available to the framework.

A typical minimum value for this is 2 (one stage to expose, one stage to read
out) from the sensor. The ISP then usually adds its own stages for custom HW
processing. Further stages may be added by SW processing.

Depending on what settings are used (e.g. YUV, JPEG) and what processing is
enabled (e.g. face detection), the actual pipeline depth (specified by
android.request.pipelineDepth) may be less than the max pipeline depth.

A pipeline depth of X stages is equivalent to a pipeline latency of X frame
intervals.

This value will be 8 or less.

HAL details: This value should be 4 or less.
@@ -3119,6 +3160,175 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata

android.sensor.testPatternData (int32 x 4)

A pixel `[R, G_even, G_odd, B]` that supplies the test pattern when
android.sensor.testPatternMode is SOLID_COLOR.

Range: Optional. Must be supported if android.sensor.availableTestPatternModes
contains SOLID_COLOR.

Each color channel is treated as an unsigned 32-bit integer. The camera device
then uses the most significant bits of each channel, taking as many bits as
its raw Bayer sensor output provides.

For example, a sensor with RAW10 Bayer output would use the 10 most
significant bits from each color channel.
Enum values for android.sensor.testPatternMode:

OFF
    Default. No test pattern mode is used, and the camera device returns
    captures from the image sensor.

SOLID_COLOR
    Each pixel in `[R, G_even, G_odd, B]` is replaced by its respective color
    channel provided in android.sensor.testPatternData.

    For example:

    android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]

    All green pixels are 100% green. All red/blue pixels are black.

    android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]

    All red pixels are 100% red. Only the odd green pixels are 100% green.
    All blue pixels are 100% black.

COLOR_BARS
    All pixel data is replaced with an 8-bar color pattern.

    The vertical bars (left-to-right) are as follows:

    * 100% white
    * yellow
    * cyan
    * green
    * magenta
    * red
    * blue
    * black

    In general the image would look like the following:

    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    W Y C G M R B K
    . . . . . . . .
    . . . . . . . .
    . . . . . . . .

    (B = Blue, K = Black)

    Each bar should take up 1/8 of the sensor pixel array width. When this is
    not possible, the bar size should be rounded down to the nearest integer
    and the pattern can repeat on the right side.

    Each bar's height must always take up the full sensor pixel array height.

    Each pixel in this test pattern must be set to either 0% intensity or
    100% intensity.

COLOR_BARS_FADE_TO_GRAY
    The test pattern is similar to COLOR_BARS, except that each bar should
    start at its specified color at the top and fade to gray at the bottom.

    Furthermore, each bar is subdivided into a left half and a right half.
    The left half should have a smooth gradient, and the right half should
    have a quantized gradient.

    In particular, the right half should consist of blocks of the same color,
    each spanning 1/16 of the active sensor pixel array width.

    The least significant bits in the quantized gradient should be copied from
    the most significant bits of the smooth gradient.

    The height of each bar should always be a multiple of 128. When this is
    not the case, the pattern should repeat at the bottom of the image.

PN9
    All pixel data is replaced by a pseudo-random sequence generated from a
    PN9 512-bit sequence (typically implemented in hardware with a linear
    feedback shift register).

    The generator should be reset at the beginning of each frame, and thus
    each subsequent raw frame with this test pattern should be exactly the
    same as the last. (A minimal generator sketch appears after these XML
    entries.)

CUSTOM1
    The first custom test pattern. All custom patterns that are available only
    on this camera device are at least this numeric value.

    All of the custom test patterns will be static (that is, the raw image
    must not vary from frame to frame).

android.sensor.testPatternMode (int32)

When enabled, the sensor sends a test pattern instead of performing a real
exposure from the camera.

Range: Optional. Defaults to OFF. Value must be one of
android.sensor.availableTestPatternModes.
When a test pattern is enabled, all manual sensor controls specified by
android.sensor.* should be ignored. All other controls should work as normal.

For example, if manual flash is enabled, flash firing should still occur (and
the test pattern should remain unmodified, since the flash would not actually
affect it).
HAL details: All test patterns are specified in the Bayer domain.

The HAL may choose to substitute test patterns from the sensor with test
patterns from on-device memory. In that case, it should be indistinguishable
to the ISP whether the data came from the sensor interconnect bus (such as
CSI2) or memory.

android.sensor.availableTestPatternModes (byte)

Optional. Defaults to [OFF]. Lists the test pattern modes supported for
android.sensor.testPatternMode.

Range: Must include OFF. All custom modes must be >= CUSTOM1.
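For the PN9 mode described above, a minimal generator sketch; the polynomial
(x^9 + x^5 + 1) and seed are assumptions about one conventional PN9
implementation, and hardware generators may differ, but the text requires a
reset at the start of every frame so that each PN9 frame is identical:

    #include <stdint.h>

    static uint16_t pn9_state = 0x1FF;          /* any non-zero 9-bit seed */

    /* Reset at the start of every frame so each PN9 frame is identical. */
    static void pn9_reset(void) { pn9_state = 0x1FF; }

    /* One output bit of the x^9 + x^5 + 1 Fibonacci LFSR (taps 9 and 5). */
    static uint8_t pn9_next_bit(void)
    {
        uint8_t bit = (uint8_t)(((pn9_state >> 8) ^ (pn9_state >> 4)) & 1u);
        pn9_state = (uint16_t)(((pn9_state << 1) | bit) & 0x1FF);
        return bit;
    }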
diff --git a/camera/include/system/camera_metadata_tags.h b/camera/include/system/camera_metadata_tags.h
index 14c098f4..43be7c5d 100644
--- a/camera/include/system/camera_metadata_tags.h
+++ b/camera/include/system/camera_metadata_tags.h
@@ -246,6 +246,8 @@ typedef enum camera_metadata_tag {
     ANDROID_REQUEST_TYPE,                             // enum         | system
     ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,           // int32[]      | public
     ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,        // int32[]      | system
+    ANDROID_REQUEST_PIPELINE_DEPTH,                   // byte         | public
+    ANDROID_REQUEST_PIPELINE_MAX_DEPTH,               // byte         | public
     ANDROID_REQUEST_END,

     ANDROID_SCALER_CROP_REGION =                      // int32[]      | public
@@ -279,6 +281,9 @@ typedef enum camera_metadata_tag {
     ANDROID_SENSOR_REFERENCE_ILLUMINANT2,             // byte         | system
     ANDROID_SENSOR_TIMESTAMP,                         // int64        | public
     ANDROID_SENSOR_TEMPERATURE,                       // float        | public
+    ANDROID_SENSOR_TEST_PATTERN_DATA,                 // int32[]      | public
+    ANDROID_SENSOR_TEST_PATTERN_MODE,                 // enum         | public
+    ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,      // byte         | public
     ANDROID_SENSOR_END,

     ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE =           // int32[]      | public
@@ -643,6 +648,16 @@ typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 {
     ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24,
 } camera_metadata_enum_android_sensor_reference_illuminant1_t;

+// ANDROID_SENSOR_TEST_PATTERN_MODE
+typedef enum camera_metadata_enum_android_sensor_test_pattern_mode {
+    ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,
+    ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256,
+} camera_metadata_enum_android_sensor_test_pattern_mode_t;
+
 // ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
 typedef enum camera_metadata_enum_android_sensor_info_color_filter_arrangement {

diff --git a/camera/src/camera_metadata_tag_info.c b/camera/src/camera_metadata_tag_info.c
index 13e3443e..c660fac3 100644
--- a/camera/src/camera_metadata_tag_info.c
+++ b/camera/src/camera_metadata_tag_info.c
@@ -366,6 +366,10 @@ static tag_info_t android_request[ANDROID_REQUEST_END -
         { "maxNumOutputStreams",           TYPE_INT32  },
     [ ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS - ANDROID_REQUEST_START ] =
         { "maxNumReprocessStreams",        TYPE_INT32  },
+    [ ANDROID_REQUEST_PIPELINE_DEPTH - ANDROID_REQUEST_START ] =
+        { "pipelineDepth",                 TYPE_BYTE   },
+    [ ANDROID_REQUEST_PIPELINE_MAX_DEPTH - ANDROID_REQUEST_START ] =
+        { "pipelineMaxDepth",              TYPE_BYTE   },
 };

 static tag_info_t android_scaler[ANDROID_SCALER_END -
@@ -436,6 +440,12 @@ static tag_info_t android_sensor[ANDROID_SENSOR_END -
         { "timestamp",                     TYPE_INT64  },
     [ ANDROID_SENSOR_TEMPERATURE - ANDROID_SENSOR_START ] =
         { "temperature",                   TYPE_FLOAT  },
+    [ ANDROID_SENSOR_TEST_PATTERN_DATA - ANDROID_SENSOR_START ] =
+        { "testPatternData",               TYPE_INT32  },
+    [ ANDROID_SENSOR_TEST_PATTERN_MODE - ANDROID_SENSOR_START ] =
+        { "testPatternMode",               TYPE_INT32  },
+    [ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES - ANDROID_SENSOR_START ] =
+        { "availableTestPatternModes",     TYPE_BYTE   },
 };

 static tag_info_t android_sensor_info[ANDROID_SENSOR_INFO_END -
@@ -1503,6 +1513,12 @@ int camera_metadata_enum_snprint(uint32_t tag,
         case ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS: {
             break;
         }
+        case ANDROID_REQUEST_PIPELINE_DEPTH: {
+            break;
+        }
+        case ANDROID_REQUEST_PIPELINE_MAX_DEPTH: {
+            break;
+        }

         case ANDROID_SCALER_CROP_REGION: {
             break;
@@ -1694,6 +1710,43 @@ int camera_metadata_enum_snprint(uint32_t tag,
         case ANDROID_SENSOR_TEMPERATURE: {
             break;
         }
+        case ANDROID_SENSOR_TEST_PATTERN_DATA: {
+            break;
+        }
+        case ANDROID_SENSOR_TEST_PATTERN_MODE: {
+            switch (value) {
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_OFF:
+                    msg = "OFF";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR:
+                    msg = "SOLID_COLOR";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS:
+                    msg = "COLOR_BARS";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY:
+                    msg = "COLOR_BARS_FADE_TO_GRAY";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_PN9:
+                    msg = "PN9";
+                    ret = 0;
+                    break;
+                case ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1:
+                    msg = "CUSTOM1";
+                    ret = 0;
+                    break;
+                default:
+                    msg = "error: enum value out of range";
+            }
+            break;
+        }
+        case ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES: {
+            break;
+        }

         case ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE: {
             break;
@@ -1997,4 +2050,4 @@ int camera_metadata_enum_snprint(uint32_t tag,
 }

-#define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 23
+#define CAMERA_METADATA_ENUM_STRING_MAX_SIZE 24
--
2.11.0
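The bump of CAMERA_METADATA_ENUM_STRING_MAX_SIZE from 23 to 24 accounts for the
longest new enum string, "COLOR_BARS_FADE_TO_GRAY" (23 characters plus the
terminating NUL). A usage sketch of the helper extended above; the call site is
hypothetical, while camera_metadata_enum_snprint is the existing function being
extended here:

    #include <stdio.h>
    #include <system/camera_metadata.h>

    void print_test_pattern_mode_example(void)
    {
        char buf[24]; /* CAMERA_METADATA_ENUM_STRING_MAX_SIZE after this change */

        if (camera_metadata_enum_snprint(ANDROID_SENSOR_TEST_PATTERN_MODE,
                ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY,
                buf, sizeof(buf)) == 0) {
            printf("test pattern mode: %s\n", buf); /* COLOR_BARS_FADE_TO_GRAY */
        }
    }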