From aa6bd6fb6d48b5f4761478e0c3026a237a0a5795 Mon Sep 17 00:00:00 2001 From: Igor Murashkin Date: Fri, 7 Feb 2014 14:31:31 -0800 Subject: [PATCH] camera_metadata: Simplify min frame durations and add available stream configs Change-Id: I41951c9a50a333e8e47d0ba616c316c065b604e8 --- camera/docs/docs.html | 514 ++++++++++++++++++++++----- camera/docs/metadata_properties.xml | 297 +++++++++++++--- camera/include/system/camera_metadata_tags.h | 9 + camera/src/camera_metadata_tag_info.c | 27 ++ 4 files changed, 701 insertions(+), 146 deletions(-) diff --git a/camera/docs/docs.html b/camera/docs/docs.html index 74aa052a..3b584a53 100644 --- a/camera/docs/docs.html +++ b/camera/docs/docs.html @@ -466,6 +466,9 @@
  • android.scaler.availableRawMinDurations
  • android.scaler.availableRawSizes
  • android.scaler.availableInputOutputFormatsMap
  • android.scaler.availableStreamConfigurations
  • android.scaler.availableMinFrameDurations
  • android.scaler.availableStallDurations
  • @@ -6485,10 +6488,10 @@ the same aspect ratio as the jpeg image.

  • The sizes will be sorted by increasing pixel area (width x height). If several resolutions have the same area, they will be sorted by increasing width.
  • The aspect ratio of the largest thumbnail size will be same as the -aspect ratio of largest size in android.scaler.availableJpegSizes. +aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations. The largest size is defined as the size that has the largest pixel area in a given size list.
  • Each size in android.scaler.availableJpegSizes will have at least +
  • Each output JPEG size in android.scaler.availableStreamConfigurations will have at least one corresponding size that has the same aspect ratio in availableThumbnailSizes, and vice versa.
  • All non (0, 0) sizes will have non-zero widths and heights.
  • @@ -10513,6 +10516,7 @@ for each resolution in andro +

    Deprecated. Do not use. TODO: Remove property.

    @@ -10565,6 +10569,7 @@ durations)

    +

    Deprecated. Do not use. TODO: Remove property.

    @@ -10665,6 +10670,7 @@ minimum supportable frame duration for that size.

    +

    Deprecated. Do not use. TODO: Remove property.

    @@ -10719,6 +10725,7 @@ encoders.

    +

    Deprecated. Do not use. TODO: Remove property.

    @@ -10796,6 +10803,7 @@ supportable frame duration for that size.

    +

    Deprecated. Do not use. TODO: Remove property.

    @@ -10849,7 +10857,8 @@ height

    -

    Must include: - sensor maximum resolution

    +

    Deprecated. Do not use. TODO: Remove property. +Must include: - sensor maximum resolution.

    @@ -10942,7 +10951,7 @@ if its dependent capability is supported:

    For ZSL-capable camera devices, using the RAW_OPAQUE format as either input or output will never hurt maximum frame rate (i.e. -android.scaler.availableStallDurations will not have RAW_OPAQUE).

    +
    android.scaler.availableStallDurations will not have RAW_OPAQUE).

    Attempting to configure an input stream with output streams not listed as available in this map is not valid.

    TODO: Add java type mapping for this property.

    @@ -10993,6 +11002,344 @@ system/core/include/system/graphics.h.

android.scaler.availableStreamConfigurations | int32 x n x 4 | [public]

    The available stream configurations that this +camera device supports +(i.e. format, width, height, output/input stream).

Details

    The configurations are listed as (format, width, height, input?) +tuples.

    +

    All camera devices will support sensor maximum resolution (defined by +android.sensor.info.activeArraySize) for the JPEG format.

    +

    For a given use case, the actual maximum supported resolution +may be lower than what is listed here, depending on the destination +Surface for the image data. For example, for recording video, +the video encoder chosen may have a maximum size limit (e.g. 1080p) +smaller than what the camera (e.g. maximum resolution is 3264x2448) +can provide.

    +

    Please reference the documentation for the image data destination to +check if it limits the maximum size for image data.

    +

    Not all output formats may be supported in a configuration with +an input stream of a particular format. For more details, see +android.scaler.availableInputOutputFormatsMap.

    +

    The following table describes the minimum required output stream +configurations based on the hardware level +(android.info.supportedHardwareLevel):

Format                 | Size                                | Hardware Level | Notes
:---------------------:|:-----------------------------------:|:--------------:|:-----:
JPEG                   | android.sensor.info.activeArraySize | Any            |
JPEG                   | 1920x1080 (1080p)                   | Any            | if 1080p <= activeArraySize
JPEG                   | 1280x720 (720p)                     | Any            | if 720p <= activeArraySize
JPEG                   | 640x480 (480p)                      | Any            | if 480p <= activeArraySize
JPEG                   | 320x240 (240p)                      | Any            | if 240p <= activeArraySize
YUV_420_888            | all output sizes available for JPEG | FULL           |
YUV_420_888            | all output sizes available for JPEG, up to the maximum video size | LIMITED |
IMPLEMENTATION_DEFINED | same as YUV_420_888                 | Any            |

    Refer to android.request.availableCapabilities for additional +mandatory stream configurations on a per-capability basis.
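For illustration (not part of this patch), a client can walk the new tag as flat int32 4-tuples. This is a minimal sketch assuming the find_camera_metadata_ro_entry helper from this repository's camera_metadata.h; the dump function name is made up.

```c
#include <stdio.h>
#include <system/camera_metadata.h>

/* Sketch: list every OUTPUT configuration advertised via
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. Entries are packed
 * as (format, width, height, direction) int32 4-tuples. */
void dump_output_configurations(const camera_metadata_t *meta) {
    camera_metadata_ro_entry_t entry;
    if (find_camera_metadata_ro_entry(meta,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry) != 0) {
        return;  /* tag not present in this static metadata */
    }
    for (size_t i = 0; i + 3 < entry.count; i += 4) {
        int32_t format = entry.data.i32[i];
        int32_t width  = entry.data.i32[i + 1];
        int32_t height = entry.data.i32[i + 2];
        if (entry.data.i32[i + 3] ==
                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
            printf("output: format 0x%x, %d x %d\n",
                   (unsigned)format, (int)width, (int)height);
        }
    }
}
```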

HAL Implementation Details

    It is recommended (but not mandatory) to also include half/quarter +of sensor maximum resolution for JPEG formats (regardless of hardware +level).

    +

    (The following is a rewording of the above required table):

    +

    The HAL must include sensor maximum resolution (defined by +android.sensor.info.activeArraySize).

    +

    For FULL capability devices (android.info.supportedHardwareLevel == FULL), +the HAL must include all YUV_420_888 sizes that have JPEG sizes listed +here as output streams.

    +

    It must also include each below resolution if it is smaller than or +equal to the sensor maximum resolution (for both YUV_420_888 and JPEG +formats), as output streams:

  • 240p (320 x 240)
  • 480p (640 x 480)
  • 720p (1280 x 720)
  • 1080p (1920 x 1080)

    For LIMITED capability devices +(android.info.supportedHardwareLevel == LIMITED), +the HAL only has to list up to the maximum video size +supported by the device.

    +

    Regardless of hardware level, every output resolution available for +YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
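As a sketch of the HAL side (assumed, not mandated by this patch), the tuples can be committed with this repository's add_camera_metadata_entry. The format codes are the HAL pixel format values from system/core/include/system/graphics.h (0x21 BLOB/JPEG, 0x23 YCbCr_420_888, 0x22 IMPLEMENTATION_DEFINED); the sizes and function name are illustrative only.

```c
#include <system/camera_metadata.h>

/* Illustrative static table: (format, width, height, direction) tuples. */
static const int32_t kConfigs[] = {
    0x21, 3264, 2448, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
    0x23, 3264, 2448, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
    0x22, 3264, 2448, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
};

int publish_stream_configurations(camera_metadata_t *meta) {
    /* data_count is the number of int32 elements, i.e. 4 per tuple. */
    return add_camera_metadata_entry(meta,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            kConfigs, sizeof(kConfigs) / sizeof(kConfigs[0]));
}
```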

    +

This supersedes the following fields, which are now deprecated:

  • availableFormats
  • available[Processed,Raw,Jpeg]Sizes

android.scaler.availableMinFrameDurations | int64 x 4 x n | [public]

    This lists the minimum frame duration for each +format/size combination.

(format, width, height, ns) x n

Details

    This should correspond to the frame duration when only that +stream is active, with all processing (typically in android.*.mode) +set to either OFF or FAST.

    +

    When multiple streams are used in a request, the minimum frame +duration will be max(individual stream min durations).

    +

    The minimum frame duration of a stream (of a particular format, size) +is the same regardless of whether the stream is input or output.

    +

    See android.sensor.frameDuration and +android.scaler.availableStallDurations for more details about +calculating the max frame rate.
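A lookup sketch (function name hypothetical, using this repository's find_camera_metadata_ro_entry): entries are int64 4-tuples, so finding one stream's minimum duration is a linear scan, and a request's minimum frame duration is then simply the maximum of this value across the streams the request uses.

```c
#include <stddef.h>
#include <stdint.h>
#include <system/camera_metadata.h>

/* Sketch: return the minimum frame duration (ns) listed in
 * ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS for one (format, size)
 * combination, or -1 if the combination is not listed. */
int64_t min_duration_for_stream(const camera_metadata_t *meta,
                                int64_t format, int64_t width,
                                int64_t height) {
    camera_metadata_ro_entry_t e;
    if (find_camera_metadata_ro_entry(meta,
            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, &e) != 0) {
        return -1;
    }
    for (size_t i = 0; i + 3 < e.count; i += 4) {
        if (e.data.i64[i] == format &&
            e.data.i64[i + 1] == width &&
            e.data.i64[i + 2] == height) {
            return e.data.i64[i + 3];  /* duration in ns */
        }
    }
    return -1;
}
```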

android.scaler.availableStallDurations | int64 x 4 x n | [public]

    This lists the maximum stall duration for each +format/size combination.

(format, width, height, ns) x n

Details

    A stall duration is how much extra time would get added +to the normal minimum frame duration for a repeating request +that has streams with non-zero stall.

    +

    For example, consider JPEG captures which have the following +characteristics:

  • JPEG streams act like processed YUV streams in requests for which they are not included; in requests in which they are directly referenced, they act as JPEG streams. This is because supporting a JPEG stream requires the underlying YUV data to always be ready for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on requests that actually reference a JPEG stream.
  • The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot process more than 1 capture at a time.

    In other words, using a repeating YUV request would result +in a steady frame rate (let's say it's 30 FPS). If a single +JPEG request is submitted periodically, the frame rate will stay +at 30 FPS (as long as we wait for the previous JPEG to return each +time). If we try to submit a repeating YUV + JPEG request, then +the frame rate will drop from 30 FPS.

    +

    In general, submitting a new request with a non-0 stall time +stream will not cause a frame rate drop unless there are still +outstanding buffers for that stream from previous requests.

    +

Submitting a repeating request with streams (call this S) is equivalent to setting the minimum frame duration to the normal minimum frame duration for S plus the maximum stall duration for S.

    +

    If interleaving requests with and without a stall duration, +a request will stall by the maximum of the remaining times +for each can-stall stream with outstanding buffers.

    +

    This means that a stalling request will not have an exposure start +until the stall has completed.

    +

    This should correspond to the stall duration when only that stream is +active, with all processing (typically in android.*.mode) set to FAST +or OFF. Setting any of the processing modes to HIGH_QUALITY +effectively results in an indeterminate stall duration for all +streams in a request (the regular stall calculation rules are +ignored).

    +

    The following formats may always have a stall duration:

  • JPEG
  • RAW16

    The following formats will never have a stall duration:

  • YUV_420_888
  • IMPLEMENTATION_DEFINED

    All other formats may or may not have an allowed stall duration on +a per-capability basis; refer to android.request.availableCapabilities +for more details.

    +

    See android.sensor.frameDuration for more information about +calculating the max frame rate (absent stalls).
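Putting the rules above together, a small sketch (names hypothetical): a repeating request that includes a stalling stream runs at its normal minimum frame duration plus the largest stall duration among its streams.

```c
#include <stddef.h>
#include <stdint.h>

/* Sketch: steady-state frame duration (ns) of a repeating request, given
 * its normal minimum frame duration and the per-stream stall durations
 * from ANDROID_SCALER_AVAILABLE_STALL_DURATIONS. */
int64_t repeating_duration_ns(int64_t min_frame_ns,
                              const int64_t *stall_ns, size_t n_streams) {
    int64_t max_stall = 0;
    for (size_t i = 0; i < n_streams; i++) {
        if (stall_ns[i] > max_stall) {
            max_stall = stall_ns[i];
        }
    }
    return min_frame_ns + max_stall;
}
```

With invented numbers, a 33.3 ms minimum frame duration plus a 100 ms JPEG stall gives a repeating rate of roughly 7.5 FPS, i.e. the drop from 30 FPS described above.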

HAL Implementation Details

    If possible, it is recommended that all non-JPEG formats +(such as RAW16) should not have a stall duration.

    + + + + + + @@ -11201,7 +11548,7 @@ start of next frame exposure.

    See android.sensor.info.maxFrameDuration, -android.scaler.available*MinDurations. The duration +android.scaler.availableMinFrameDurations. The duration is capped to max(duration, exposureTime + overhead).

    @@ -11248,59 +11595,42 @@ frame rate of a given configured stream set is governed only by the largest requested stream resolution.
  • Using more than one output stream in a request does not affect the frame duration.
  • -JPEG streams act like processed YUV streams in requests for which they are not included; in requests in which they are directly referenced, they act as JPEG streams. This is because supporting a JPEG stream requires the underlying YUV data to always be ready for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on requests that actually reference a JPEG stream.
  • -The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot process more than 1 capture at a time.
  • +Certain format-streams may need to do additional background processing before data is consumed/produced by that stream. These processors can run concurrently to the rest of the camera pipeline, but cannot process more than 1 capture at a time.
  • The necessary information for the application, given the model above, -is provided via the android.scaler.available*MinDurations fields. +is provided via the android.scaler.availableMinFrameDurations field. These are used to determine the maximum frame rate / minimum frame duration that is possible for a given stream configuration.

    Specifically, the application can use the following rules to -determine the minimum frame duration it can request from the HAL +determine the minimum frame duration it can request from the camera device:

  -1. Given the application's currently configured set of output streams, S, divide them into three sets: streams in a JPEG format SJ, streams in a raw sensor format SR, and the rest ('processed') SP.
  -2. For each subset of streams, find the largest resolution (by pixel count) in the subset. This gives (at most) three resolutions RJ, RR, and RP.
  -3. If RJ is greater than RP, set RP equal to RJ. If there is no exact match for RP == RJ (in particular there isn't an available processed resolution at the same size as RJ), then set RP equal to the smallest processed resolution that is larger than RJ. If there are no processed resolutions larger than RJ, then set RP to the processed resolution closest to RJ.
  -4. If RP is greater than RR, set RR equal to RP. If there is no exact match for RR == RP (in particular there isn't an available raw resolution at the same size as RP), then set RR equal to the smallest raw resolution that is larger than RP. If there are no raw resolutions larger than RP, then set RR to the raw resolution closest to RP.
  -5. Look up the matching minimum frame durations in the property lists android.scaler.availableJpegMinDurations, android.scaler.availableRawMinDurations, and android.scaler.availableProcessedMinDurations. This gives three minimum frame durations FJ, FR, and FP.
  -6. If a stream of requests does not use a JPEG stream, then the minimum supported frame duration for each request is max(FR, FP).
  -7. If a stream of requests all use the JPEG stream, then the minimum supported frame duration for each request is max(FR, FP, FJ).
  -8. If a mix of JPEG-using and non-JPEG-using requests is submitted by the application, then the HAL will have to delay JPEG-using requests whenever the JPEG encoder is still busy processing an older capture. This will happen whenever a JPEG-using request starts capture less than FJ ns after a previous JPEG-using request. The minimum supported frame duration will vary between the values calculated in #6 and #7.
  +1. Let the set of currently configured input/output streams be called S.
  +2. Find the minimum frame durations for each stream in S, by looking it up in android.scaler.availableMinFrameDurations (with its respective size/format). Let this set of frame durations be called F.
  +3. For any given request R, the minimum frame duration allowed for R is the maximum out of all values in F. Let the streams used in R be called S_r.

    If none of the streams in S_r have a stall time (listed in +android.scaler.availableStallDurations), then the frame duration in +F determines the steady state frame rate that the application will +get if it uses R as a repeating request. Let this special kind +of request be called Rsimple.

    +

A repeating request Rsimple can occasionally be interleaved with a single capture of a new request Rstall (which has at least one in-use stream with a non-zero stall time). If Rstall has the same minimum frame duration, this will not cause a frame rate loss as long as all buffers from the previous Rstall have already been delivered.

    +

    For more details about stalling, see +android.scaler.availableStallDurations.
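The three rules above condense to a max() over per-stream durations. This self-contained example (all numbers invented) shows the Rsimple steady state and the effect of a repeating stalling stream.

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
    /* Invented durations for S = {YUV_420_888 1080p, JPEG 8MP}, as looked
     * up in android.scaler.availableMinFrameDurations. */
    int64_t F[] = { 33333333LL, 33333333LL };
    int64_t min_duration = 0;
    for (size_t i = 0; i < sizeof(F) / sizeof(F[0]); i++) {
        if (F[i] > min_duration) min_duration = F[i];
    }
    /* Rsimple: no stalling streams, steady state follows F directly. */
    printf("Rsimple: %.1f FPS\n", 1e9 / (double)min_duration);
    /* Repeating YUV + JPEG with a hypothetical 100 ms JPEG stall. */
    printf("YUV+JPEG: %.1f FPS\n",
           1e9 / (double)(min_duration + 100000000LL));
    return 0;
}
```

This prints 30.0 FPS for Rsimple and 7.5 FPS for the repeating YUV + JPEG case.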

    @@ -11863,6 +12193,13 @@ seconds (30e9 ns).

    android.sensor.maxFrameDuration must be greater or equal to the android.sensor.exposureTimeRange max value (since exposure time overrides frame duration).

    +

Available minimum frame durations for JPEG must be no greater than the corresponding YUV_420_888/IMPLEMENTATION_DEFINED minimum frame durations (for that respective size).

    +

    Since JPEG processing is considered offline and can take longer than +a single uncompressed capture, refer to +android.scaler.availableStallDurations +for details about encoding this scenario.
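A sketch of a consistency check for this rule, reusing the hypothetical min_duration_for_stream() helper from the availableMinFrameDurations sketch above; the format codes are the graphics.h BLOB (0x21) and YCbCr_420_888 (0x23) values.

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <system/camera_metadata.h>

/* Declared in the earlier sketch. */
int64_t min_duration_for_stream(const camera_metadata_t *meta,
                                int64_t format, int64_t width,
                                int64_t height);

/* Sketch: verify that for each (width, height) pair, the JPEG minimum
 * frame duration does not exceed the YUV_420_888 one. */
bool jpeg_min_durations_ok(const camera_metadata_t *meta,
                           const int32_t (*sizes)[2], size_t n) {
    for (size_t i = 0; i < n; i++) {
        int64_t jpeg = min_duration_for_stream(meta, 0x21,
                                               sizes[i][0], sizes[i][1]);
        int64_t yuv  = min_duration_for_stream(meta, 0x23,
                                               sizes[i][0], sizes[i][1]);
        if (jpeg >= 0 && yuv >= 0 && jpeg > yuv) {
            return false;  /* JPEG min duration must be <= YUV's */
        }
    }
    return true;
}
```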

    @@ -12863,7 +13200,7 @@ start of next frame exposure.

    See android.sensor.info.maxFrameDuration, -android.scaler.available*MinDurations. The duration +android.scaler.availableMinFrameDurations. The duration is capped to max(duration, exposureTime + overhead).

    @@ -12910,59 +13247,42 @@ frame rate of a given configured stream set is governed only by the largest requested stream resolution.
  • Using more than one output stream in a request does not affect the frame duration.
  • -JPEG streams act like processed YUV streams in requests for which they are not included; in requests in which they are directly referenced, they act as JPEG streams. This is because supporting a JPEG stream requires the underlying YUV data to always be ready for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on requests that actually reference a JPEG stream.
  • -The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot process more than 1 capture at a time.
  • +Certain format-streams may need to do additional background processing before data is consumed/produced by that stream. These processors can run concurrently to the rest of the camera pipeline, but cannot process more than 1 capture at a time.
  • The necessary information for the application, given the model above, -is provided via the android.scaler.available*MinDurations fields. +is provided via the android.scaler.availableMinFrameDurations field. These are used to determine the maximum frame rate / minimum frame duration that is possible for a given stream configuration.

    Specifically, the application can use the following rules to -determine the minimum frame duration it can request from the HAL +determine the minimum frame duration it can request from the camera device:

  -1. Given the application's currently configured set of output streams, S, divide them into three sets: streams in a JPEG format SJ, streams in a raw sensor format SR, and the rest ('processed') SP.
  -2. For each subset of streams, find the largest resolution (by pixel count) in the subset. This gives (at most) three resolutions RJ, RR, and RP.
  -3. If RJ is greater than RP, set RP equal to RJ. If there is no exact match for RP == RJ (in particular there isn't an available processed resolution at the same size as RJ), then set RP equal to the smallest processed resolution that is larger than RJ. If there are no processed resolutions larger than RJ, then set RP to the processed resolution closest to RJ.
  -4. If RP is greater than RR, set RR equal to RP. If there is no exact match for RR == RP (in particular there isn't an available raw resolution at the same size as RP), then set RR equal to the smallest raw resolution that is larger than RP. If there are no raw resolutions larger than RP, then set RR to the raw resolution closest to RP.
  -5. Look up the matching minimum frame durations in the property lists android.scaler.availableJpegMinDurations, android.scaler.availableRawMinDurations, and android.scaler.availableProcessedMinDurations. This gives three minimum frame durations FJ, FR, and FP.
  -6. If a stream of requests does not use a JPEG stream, then the minimum supported frame duration for each request is max(FR, FP).
  -7. If a stream of requests all use the JPEG stream, then the minimum supported frame duration for each request is max(FR, FP, FJ).
  -8. If a mix of JPEG-using and non-JPEG-using requests is submitted by the application, then the HAL will have to delay JPEG-using requests whenever the JPEG encoder is still busy processing an older capture. This will happen whenever a JPEG-using request starts capture less than FJ ns after a previous JPEG-using request. The minimum supported frame duration will vary between the values calculated in #6 and #7.
  +1. Let the set of currently configured input/output streams be called S.
  +2. Find the minimum frame durations for each stream in S, by looking it up in android.scaler.availableMinFrameDurations (with its respective size/format). Let this set of frame durations be called F.
  +3. For any given request R, the minimum frame duration allowed for R is the maximum out of all values in F. Let the streams used in R be called S_r.

    If none of the streams in S_r have a stall time (listed in +android.scaler.availableStallDurations), then the frame duration in +F determines the steady state frame rate that the application will +get if it uses R as a repeating request. Let this special kind +of request be called Rsimple.

    +

A repeating request Rsimple can occasionally be interleaved with a single capture of a new request Rstall (which has at least one in-use stream with a non-zero stall time). If Rstall has the same minimum frame duration, this will not cause a frame rate loss as long as all buffers from the previous Rstall have already been delivered.

    +

    For more details about stalling, see +android.scaler.availableStallDurations.

    @@ -16531,6 +16851,8 @@ to know when sensor settings have been applied.

  • android.scaler.availableProcessedMinDurations (static)
  • android.scaler.availableProcessedSizes (static)
  • android.scaler.availableRawMinDurations (static)
  • android.scaler.availableMinFrameDurations (static)
  • android.scaler.availableStallDurations (static)
  • android.sensor.frameDuration (controls)
  • android.sensor.info.sensitivityRange (static)
  • android.sensor.info.maxFrameDuration (static)
  • diff --git a/camera/docs/metadata_properties.xml b/camera/docs/metadata_properties.xml index e7581ec5..f82e9a41 100644 --- a/camera/docs/metadata_properties.xml +++ b/camera/docs/metadata_properties.xml @@ -1893,10 +1893,10 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata * The sizes will be sorted by increasing pixel area (width x height). If several resolutions have the same area, they will be sorted by increasing width. * The aspect ratio of the largest thumbnail size will be same as the - aspect ratio of largest size in android.scaler.availableJpegSizes. + aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations. The largest size is defined as the size that has the largest pixel area in a given size list. - * Each size in android.scaler.availableJpegSizes will have at least + * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least one corresponding size that has the same aspect ratio in availableThumbnailSizes, and vice versa. * All non (0, 0) sizes will have non-zero widths and heights. @@ -3149,6 +3149,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata for each resolution in android.scaler.availableJpegSizes. ns + **Deprecated**. Do not use. TODO: Remove property.
    This corresponds to the minimum steady-state frame duration when only that JPEG stream is active and captured in a burst, with all @@ -3166,6 +3167,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata 2 The JPEG resolutions that are supported by this camera device. + **Deprecated**. Do not use. TODO: Remove property.
    The resolutions are listed as `(width, height)` pairs. All camera devices will support sensor maximum resolution (defined by android.sensor.info.activeArraySize). @@ -3193,9 +3195,9 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata For each available processed output size (defined in android.scaler.availableProcessedSizes), this property lists the minimum supportable frame duration for that size. - ns + **Deprecated**. Do not use. TODO: Remove property.
    This should correspond to the frame duration when only that processed stream is active, with all processing (typically in android.*.mode) @@ -3216,6 +3218,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata processed output streams, such as YV12, NV12, and platform opaque YUV/RGB streams to the GPU or video encoders. + **Deprecated**. Do not use. TODO: Remove property.
    The resolutions are listed as `(width, height)` pairs. @@ -3256,6 +3259,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata supportable frame duration for that size. ns + **Deprecated**. Do not use. TODO: Remove property.
    Should correspond to the frame duration when only the raw stream is active. @@ -3274,7 +3278,8 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata The resolutions available for use with raw sensor output streams, listed as width, height - Must include: - sensor maximum resolution + **Deprecated**. Do not use. TODO: Remove property. + Must include: - sensor maximum resolution. @@ -3356,6 +3361,203 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata system/core/include/system/graphics.h. + + + n + 4 + + + OUTPUT + INPUT + + The available stream configurations that this + camera device supports + (i.e. format, width, height, output/input stream). + +
+ The configurations are listed as `(format, width, height, input?)`
+ tuples.
+
+ All camera devices will support sensor maximum resolution (defined by
+ android.sensor.info.activeArraySize) for the JPEG format.
+
+ For a given use case, the actual maximum supported resolution
+ may be lower than what is listed here, depending on the destination
+ Surface for the image data. For example, for recording video,
+ the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ can provide.
+
+ Please reference the documentation for the image data destination to
+ check if it limits the maximum size for image data.
+
+ Not all output formats may be supported in a configuration with
+ an input stream of a particular format. For more details, see
+ android.scaler.availableInputOutputFormatsMap.
+
+ The following table describes the minimum required output stream
+ configurations based on the hardware level
+ (android.info.supportedHardwareLevel):
+
+ Format | Size | Hardware Level | Notes
+ :-------------:|:--------------------------------------------:|:--------------:|:--------------:
+ JPEG | android.sensor.info.activeArraySize | Any |
+ JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize
+ JPEG | 1280x720 (720p) | Any | if 720p <= activeArraySize
+ JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize
+ JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize
+ YUV_420_888 | all output sizes available for JPEG | FULL |
+ YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED |
+ IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any |
+
+ Refer to android.request.availableCapabilities for additional
+ mandatory stream configurations on a per-capability basis.
+
+ It is recommended (but not mandatory) to also include half/quarter
+ of sensor maximum resolution for JPEG formats (regardless of hardware
+ level).
+
+ (The following is a rewording of the above required table):
+
+ The HAL must include sensor maximum resolution (defined by
+ android.sensor.info.activeArraySize).
+
+ For FULL capability devices (`android.info.supportedHardwareLevel == FULL`),
+ the HAL must include all YUV_420_888 sizes that have JPEG sizes listed
+ here as output streams.
+
+ It must also include each below resolution if it is smaller than or
+ equal to the sensor maximum resolution (for both YUV_420_888 and JPEG
+ formats), as output streams:
+
+ * 240p (320 x 240)
+ * 480p (640 x 480)
+ * 720p (1280 x 720)
+ * 1080p (1920 x 1080)
+
+ For LIMITED capability devices
+ (`android.info.supportedHardwareLevel == LIMITED`),
+ the HAL only has to list up to the maximum video size
+ supported by the device.
+
+ Regardless of hardware level, every output resolution available for
+ YUV_420_888 must also be available for IMPLEMENTATION_DEFINED.
+
+ This supersedes the following fields, which are now deprecated:
+
+ * availableFormats
+ * available[Processed,Raw,Jpeg]Sizes
    + + + 4 + n + + This lists the minimum frame duration for each + format/size combination. + + (format, width, height, ns) x n +
    + This should correspond to the frame duration when only that + stream is active, with all processing (typically in android.*.mode) + set to either OFF or FAST. + + When multiple streams are used in a request, the minimum frame + duration will be max(individual stream min durations). + + The minimum frame duration of a stream (of a particular format, size) + is the same regardless of whether the stream is input or output. + + See android.sensor.frameDuration and + android.scaler.availableStallDurations for more details about + calculating the max frame rate. +
    + +
    + + + 4 + n + + This lists the maximum stall duration for each + format/size combination. + + (format, width, height, ns) x n +
    + A stall duration is how much extra time would get added + to the normal minimum frame duration for a repeating request + that has streams with non-zero stall. + + For example, consider JPEG captures which have the following + characteristics: + + * JPEG streams act like processed YUV streams in requests for which + they are not included; in requests in which they are directly + referenced, they act as JPEG streams. This is because supporting a + JPEG stream requires the underlying YUV data to always be ready for + use by a JPEG encoder, but the encoder will only be used (and impact + frame duration) on requests that actually reference a JPEG stream. + * The JPEG processor can run concurrently to the rest of the camera + pipeline, but cannot process more than 1 capture at a time. + + In other words, using a repeating YUV request would result + in a steady frame rate (let's say it's 30 FPS). If a single + JPEG request is submitted periodically, the frame rate will stay + at 30 FPS (as long as we wait for the previous JPEG to return each + time). If we try to submit a repeating YUV + JPEG request, then + the frame rate will drop from 30 FPS. + + In general, submitting a new request with a non-0 stall time + stream will _not_ cause a frame rate drop unless there are still + outstanding buffers for that stream from previous requests. + + Submitting a repeating request with streams (call this `S`) + is the same as setting the minimum frame duration from + the normal minimum frame duration corresponding to `S`, added with + the maximum stall duration for `S`. + + If interleaving requests with and without a stall duration, + a request will stall by the maximum of the remaining times + for each can-stall stream with outstanding buffers. + + This means that a stalling request will not have an exposure start + until the stall has completed. + + This should correspond to the stall duration when only that stream is + active, with all processing (typically in android.*.mode) set to FAST + or OFF. Setting any of the processing modes to HIGH_QUALITY + effectively results in an indeterminate stall duration for all + streams in a request (the regular stall calculation rules are + ignored). + + The following formats may always have a stall duration: + + * JPEG + * RAW16 + + The following formats will never have a stall duration: + + * YUV_420_888 + * IMPLEMENTATION_DEFINED + + All other formats may or may not have an allowed stall duration on + a per-capability basis; refer to android.request.availableCapabilities + for more details. + + See android.sensor.frameDuration for more information about + calculating the max frame rate (absent stalls). +
    + + If possible, it is recommended that all non-JPEG formats + (such as RAW16) should not have a stall duration. + + +
    @@ -3377,7 +3579,7 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata start of next frame exposure. nanoseconds See android.sensor.info.maxFrameDuration, - android.scaler.available*MinDurations. The duration + android.scaler.availableMinFrameDurations. The duration is capped to `max(duration, exposureTime + overhead)`.
    The maximum frame rate that can be supported by a camera subsystem is @@ -3411,59 +3613,45 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata largest requested stream resolution. * Using more than one output stream in a request does not affect the frame duration. - * JPEG streams act like processed YUV streams in requests for which - they are not included; in requests in which they are directly - referenced, they act as JPEG streams. This is because supporting a - JPEG stream requires the underlying YUV data to always be ready for - use by a JPEG encoder, but the encoder will only be used (and impact - frame duration) on requests that actually reference a JPEG stream. - * The JPEG processor can run concurrently to the rest of the camera - pipeline, but cannot process more than 1 capture at a time. + * Certain format-streams may need to do additional background processing + before data is consumed/produced by that stream. These processors + can run concurrently to the rest of the camera pipeline, but + cannot process more than 1 capture at a time. The necessary information for the application, given the model above, - is provided via the android.scaler.available*MinDurations fields. + is provided via the android.scaler.availableMinFrameDurations field. These are used to determine the maximum frame rate / minimum frame duration that is possible for a given stream configuration. Specifically, the application can use the following rules to - determine the minimum frame duration it can request from the HAL + determine the minimum frame duration it can request from the camera device: - 1. Given the application's currently configured set of output - streams, `S`, divide them into three sets: streams in a JPEG format - `SJ`, streams in a raw sensor format `SR`, and the rest ('processed') - `SP`. - 1. For each subset of streams, find the largest resolution (by pixel - count) in the subset. This gives (at most) three resolutions `RJ`, - `RR`, and `RP`. - 1. If `RJ` is greater than `RP`, set `RP` equal to `RJ`. If there is - no exact match for `RP == RJ` (in particular there isn't an available - processed resolution at the same size as `RJ`), then set `RP` equal - to the smallest processed resolution that is larger than `RJ`. If - there are no processed resolutions larger than `RJ`, then set `RJ` to - the processed resolution closest to `RJ`. - 1. If `RP` is greater than `RR`, set `RR` equal to `RP`. If there is - no exact match for `RR == RP` (in particular there isn't an available - raw resolution at the same size as `RP`), then set `RR` equal to - or to the smallest raw resolution that is larger than `RP`. If - there are no raw resolutions larger than `RP`, then set `RR` to - the raw resolution closest to `RP`. - 1. Look up the matching minimum frame durations in the property lists - android.scaler.availableJpegMinDurations, - android.scaler.availableRawMinDurations, and - android.scaler.availableProcessedMinDurations. This gives three - minimum frame durations `FJ`, `FR`, and `FP`. - 1. If a stream of requests do not use a JPEG stream, then the minimum - supported frame duration for each request is `max(FR, FP)`. - 1. If a stream of requests all use the JPEG stream, then the minimum - supported frame duration for each request is `max(FR, FP, FJ)`. - 1. If a mix of JPEG-using and non-JPEG-using requests is submitted by - the application, then the HAL will have to delay JPEG-using requests - whenever the JPEG encoder is still busy processing an older capture. 
- This will happen whenever a JPEG-using request starts capture less - than `FJ` _ns_ after a previous JPEG-using request. The minimum - supported frame duration will vary between the values calculated in - \#6 and \#7. + 1. Let the set of currently configured input/output streams + be called `S`. + 1. Find the minimum frame durations for each stream in `S`, by + looking it up in android.scaler.availableMinFrameDurations (with + its respective size/format). Let this set of frame durations be called + `F`. + 1. For any given request `R`, the minimum frame duration allowed + for `R` is the maximum out of all values in `F`. Let the streams + used in `R` be called `S_r`. + + If none of the streams in `S_r` have a stall time (listed in + android.scaler.availableStallDurations), then the frame duration in + `F` determines the steady state frame rate that the application will + get if it uses `R` as a repeating request. Let this special kind + of request be called `Rsimple`. + + A repeating request `Rsimple` can be _occasionally_ interleaved + by a single capture of a new request `Rstall` (which has at least + one in-use stream with a non-0 stall time) and if `Rstall` has the + same minimum frame duration this will not cause a frame rate loss + if all buffers from the previous `Rstall` have already been + delivered. + + For more details about stalling, see + android.scaler.availableStallDurations.
    @@ -3565,6 +3753,15 @@ xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata android.sensor.maxFrameDuration must be greater or equal to the android.sensor.exposureTimeRange max value (since exposure time overrides frame duration). + + Available minimum frame durations for JPEG must be no greater + than that of the YUV_420_888/IMPLEMENTATION_DEFINED + minimum frame durations (for that respective size). + + Since JPEG processing is considered offline and can take longer than + a single uncompressed capture, refer to + android.scaler.availableStallDurations + for details about encoding this scenario. diff --git a/camera/include/system/camera_metadata_tags.h b/camera/include/system/camera_metadata_tags.h index 65d446ad..08e5032f 100644 --- a/camera/include/system/camera_metadata_tags.h +++ b/camera/include/system/camera_metadata_tags.h @@ -270,6 +270,9 @@ typedef enum camera_metadata_tag { ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, // int64[] | system ANDROID_SCALER_AVAILABLE_RAW_SIZES, // int32[] | system ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,// int32[] | public + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, // enum[] | public + ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, // int64[] | public + ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, // int64[] | public ANDROID_SCALER_END, ANDROID_SENSOR_EXPOSURE_TIME = // int64 | public @@ -668,6 +671,12 @@ typedef enum camera_metadata_enum_android_scaler_available_formats { ANDROID_SCALER_AVAILABLE_FORMATS_BLOB = 0x21, } camera_metadata_enum_android_scaler_available_formats_t; +// ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS +typedef enum camera_metadata_enum_android_scaler_available_stream_configurations { + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT, +} camera_metadata_enum_android_scaler_available_stream_configurations_t; + // ANDROID_SENSOR_REFERENCE_ILLUMINANT1 typedef enum camera_metadata_enum_android_sensor_reference_illuminant1 { diff --git a/camera/src/camera_metadata_tag_info.c b/camera/src/camera_metadata_tag_info.c index 2d1fba77..942d2cfe 100644 --- a/camera/src/camera_metadata_tag_info.c +++ b/camera/src/camera_metadata_tag_info.c @@ -413,6 +413,12 @@ static tag_info_t android_scaler[ANDROID_SCALER_END - [ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP - ANDROID_SCALER_START ] = { "availableInputOutputFormatsMap", TYPE_INT32 }, + [ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS - ANDROID_SCALER_START ] = + { "availableStreamConfigurations", TYPE_INT32 }, + [ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS - ANDROID_SCALER_START ] = + { "availableMinFrameDurations", TYPE_INT64 }, + [ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS - ANDROID_SCALER_START ] = + { "availableStallDurations", TYPE_INT64 }, }; static tag_info_t android_sensor[ANDROID_SENSOR_END - @@ -1701,6 +1707,27 @@ int camera_metadata_enum_snprint(uint32_t tag, case ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP: { break; } + case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS: { + switch (value) { + case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT: + msg = "OUTPUT"; + ret = 0; + break; + case ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT: + msg = "INPUT"; + ret = 0; + break; + default: + msg = "error: enum value out of range"; + } + break; + } + case ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS: { + break; + } + case ANDROID_SCALER_AVAILABLE_STALL_DURATIONS: { + break; + } case ANDROID_SENSOR_EXPOSURE_TIME: { break; -- 2.11.0