2 * Copyright 2020 The Chromium OS Authors. All rights reserved.
3 * Use of this source code is governed by a BSD-style license that can be
4 * found in the LICENSE file.
#include "cros_gralloc/gralloc4/CrosGralloc4Utils.h"

#include <cerrno>
#include <unordered_map>

#include <aidl/android/hardware/graphics/common/PlaneLayoutComponent.h>
#include <aidl/android/hardware/graphics/common/PlaneLayoutComponentType.h>
#include <android-base/stringprintf.h>
#include <android-base/strings.h>
#include <cutils/native_handle.h>
#include <gralloctypes/Gralloc4.h>

#include "cros_gralloc/cros_gralloc_helpers.h"
21 using aidl::android::hardware::graphics::common::PlaneLayout;
22 using aidl::android::hardware::graphics::common::PlaneLayoutComponent;
23 using aidl::android::hardware::graphics::common::PlaneLayoutComponentType;
24 using android::hardware::hidl_bitfield;
25 using android::hardware::hidl_handle;
26 using android::hardware::graphics::common::V1_2::BufferUsage;
27 using android::hardware::graphics::common::V1_2::PixelFormat;
29 using BufferDescriptorInfo =
30 android::hardware::graphics::mapper::V4_0::IMapper::BufferDescriptorInfo;
32 std::string getPixelFormatString(PixelFormat format) {
34 case PixelFormat::BGRA_8888:
35 return "PixelFormat::BGRA_8888";
36 case PixelFormat::BLOB:
37 return "PixelFormat::BLOB";
38 case PixelFormat::DEPTH_16:
39 return "PixelFormat::DEPTH_16";
40 case PixelFormat::DEPTH_24:
41 return "PixelFormat::DEPTH_24";
42 case PixelFormat::DEPTH_24_STENCIL_8:
43 return "PixelFormat::DEPTH_24_STENCIL_8";
44 case PixelFormat::DEPTH_32F:
45 return "PixelFormat::DEPTH_24";
46 case PixelFormat::DEPTH_32F_STENCIL_8:
47 return "PixelFormat::DEPTH_24_STENCIL_8";
48 case PixelFormat::HSV_888:
49 return "PixelFormat::HSV_888";
50 case PixelFormat::IMPLEMENTATION_DEFINED:
51 return "PixelFormat::IMPLEMENTATION_DEFINED";
52 case PixelFormat::RAW10:
53 return "PixelFormat::RAW10";
54 case PixelFormat::RAW12:
55 return "PixelFormat::RAW12";
56 case PixelFormat::RAW16:
57 return "PixelFormat::RAW16";
58 case PixelFormat::RAW_OPAQUE:
59 return "PixelFormat::RAW_OPAQUE";
60 case PixelFormat::RGBA_1010102:
61 return "PixelFormat::RGBA_1010102";
62 case PixelFormat::RGBA_8888:
63 return "PixelFormat::RGBA_8888";
64 case PixelFormat::RGBA_FP16:
65 return "PixelFormat::RGBA_FP16";
66 case PixelFormat::RGBX_8888:
67 return "PixelFormat::RGBX_8888";
68 case PixelFormat::RGB_565:
69 return "PixelFormat::RGB_565";
70 case PixelFormat::RGB_888:
71 return "PixelFormat::RGB_888";
72 case PixelFormat::STENCIL_8:
73 return "PixelFormat::STENCIL_8";
74 case PixelFormat::Y16:
75 return "PixelFormat::Y16";
77 return "PixelFormat::Y8";
78 case PixelFormat::YCBCR_420_888:
79 return "PixelFormat::YCBCR_420_888";
80 case PixelFormat::YCBCR_422_I:
81 return "PixelFormat::YCBCR_422_I";
82 case PixelFormat::YCBCR_422_SP:
83 return "PixelFormat::YCBCR_422_SP";
84 case PixelFormat::YCBCR_P010:
85 return "PixelFormat::YCBCR_P010";
86 case PixelFormat::YCRCB_420_SP:
87 return "PixelFormat::YCRCB_420_SP";
88 case PixelFormat::YV12:
89 return "PixelFormat::YV12";
91 return android::base::StringPrintf("PixelFormat::Unknown(%d)", static_cast<uint32_t>(format));
94 std::string getUsageString(hidl_bitfield<BufferUsage> bufferUsage) {
95 using Underlying = typename std::underlying_type<BufferUsage>::type;
97 Underlying usage = static_cast<Underlying>(bufferUsage);
99 std::vector<std::string> usages;
100 if (usage & BufferUsage::CAMERA_INPUT) {
101 usage &= ~static_cast<Underlying>(BufferUsage::CAMERA_INPUT);
102 usages.push_back("BufferUsage::CAMERA_INPUT");
104 if (usage & BufferUsage::CAMERA_OUTPUT) {
105 usage &= ~static_cast<Underlying>(BufferUsage::CAMERA_OUTPUT);
106 usages.push_back("BufferUsage::CAMERA_OUTPUT");
108 if (usage & BufferUsage::COMPOSER_CURSOR) {
109 usage &= ~static_cast<Underlying>(BufferUsage::COMPOSER_CURSOR);
110 usages.push_back("BufferUsage::COMPOSER_CURSOR");
112 if (usage & BufferUsage::COMPOSER_OVERLAY) {
113 usage &= ~static_cast<Underlying>(BufferUsage::COMPOSER_OVERLAY);
114 usages.push_back("BufferUsage::COMPOSER_OVERLAY");
116 if (usage & BufferUsage::COMPOSER_CLIENT_TARGET) {
117 usage &= ~static_cast<Underlying>(BufferUsage::COMPOSER_CLIENT_TARGET);
118 usages.push_back("BufferUsage::COMPOSER_CLIENT_TARGET");
120 if (usage & BufferUsage::CPU_READ_OFTEN) {
121 usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_OFTEN);
122 usages.push_back("BufferUsage::CPU_READ_OFTEN");
124 if (usage & BufferUsage::CPU_READ_NEVER) {
125 usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_NEVER);
126 usages.push_back("BufferUsage::CPU_READ_NEVER");
128 if (usage & BufferUsage::CPU_READ_RARELY) {
129 usage &= ~static_cast<Underlying>(BufferUsage::CPU_READ_RARELY);
130 usages.push_back("BufferUsage::CPU_READ_RARELY");
132 if (usage & BufferUsage::CPU_WRITE_NEVER) {
133 usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_NEVER);
134 usages.push_back("BufferUsage::CPU_WRITE_NEVER");
136 if (usage & BufferUsage::CPU_WRITE_OFTEN) {
137 usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_OFTEN);
138 usages.push_back("BufferUsage::CPU_WRITE_OFTEN");
140 if (usage & BufferUsage::CPU_WRITE_RARELY) {
141 usage &= ~static_cast<Underlying>(BufferUsage::CPU_WRITE_RARELY);
142 usages.push_back("BufferUsage::CPU_WRITE_RARELY");
144 if (usage & BufferUsage::GPU_RENDER_TARGET) {
145 usage &= ~static_cast<Underlying>(BufferUsage::GPU_RENDER_TARGET);
146 usages.push_back("BufferUsage::GPU_RENDER_TARGET");
148 if (usage & BufferUsage::GPU_TEXTURE) {
149 usage &= ~static_cast<Underlying>(BufferUsage::GPU_TEXTURE);
150 usages.push_back("BufferUsage::GPU_TEXTURE");
152 if (usage & BufferUsage::PROTECTED) {
153 usage &= ~static_cast<Underlying>(BufferUsage::PROTECTED);
154 usages.push_back("BufferUsage::PROTECTED");
156 if (usage & BufferUsage::RENDERSCRIPT) {
157 usage &= ~static_cast<Underlying>(BufferUsage::RENDERSCRIPT);
158 usages.push_back("BufferUsage::RENDERSCRIPT");
160 if (usage & BufferUsage::VIDEO_DECODER) {
161 usage &= ~static_cast<Underlying>(BufferUsage::VIDEO_DECODER);
162 usages.push_back("BufferUsage::VIDEO_DECODER");
164 if (usage & BufferUsage::VIDEO_ENCODER) {
165 usage &= ~static_cast<Underlying>(BufferUsage::VIDEO_ENCODER);
166 usages.push_back("BufferUsage::VIDEO_ENCODER");
170 usages.push_back(android::base::StringPrintf("UnknownUsageBits-%" PRIu64, usage));
173 return android::base::Join(usages, '|');
176 int convertToDrmFormat(PixelFormat format, uint32_t* outDrmFormat) {
178 case PixelFormat::BGRA_8888:
179 *outDrmFormat = DRM_FORMAT_ARGB8888;
182 * Choose DRM_FORMAT_R8 because <system/graphics.h> requires the buffers
183 * with a format HAL_PIXEL_FORMAT_BLOB have a height of 1, and width
184 * equal to their size in bytes.
186 case PixelFormat::BLOB:
187 *outDrmFormat = DRM_FORMAT_R8;
189 case PixelFormat::DEPTH_16:
191 case PixelFormat::DEPTH_24:
193 case PixelFormat::DEPTH_24_STENCIL_8:
195 case PixelFormat::DEPTH_32F:
197 case PixelFormat::DEPTH_32F_STENCIL_8:
199 case PixelFormat::HSV_888:
201 case PixelFormat::IMPLEMENTATION_DEFINED:
202 *outDrmFormat = DRM_FORMAT_FLEX_IMPLEMENTATION_DEFINED;
204 case PixelFormat::RAW10:
206 case PixelFormat::RAW12:
208 case PixelFormat::RAW16:
209 *outDrmFormat = DRM_FORMAT_R16;
212 case PixelFormat::RAW_OPAQUE:
214 case PixelFormat::RGBA_1010102:
215 *outDrmFormat = DRM_FORMAT_ABGR2101010;
217 case PixelFormat::RGBA_8888:
218 *outDrmFormat = DRM_FORMAT_ABGR8888;
220 case PixelFormat::RGBA_FP16:
221 *outDrmFormat = DRM_FORMAT_ABGR16161616F;
223 case PixelFormat::RGBX_8888:
224 *outDrmFormat = DRM_FORMAT_XBGR8888;
226 case PixelFormat::RGB_565:
227 *outDrmFormat = DRM_FORMAT_RGB565;
229 case PixelFormat::RGB_888:
230 *outDrmFormat = DRM_FORMAT_RGB888;
232 case PixelFormat::STENCIL_8:
234 case PixelFormat::Y16:
235 *outDrmFormat = DRM_FORMAT_R16;
237 case PixelFormat::Y8:
238 *outDrmFormat = DRM_FORMAT_R8;
240 case PixelFormat::YCBCR_420_888:
241 *outDrmFormat = DRM_FORMAT_FLEX_YCbCr_420_888;
243 case PixelFormat::YCBCR_422_SP:
245 case PixelFormat::YCBCR_422_I:
247 case PixelFormat::YCBCR_P010:
248 *outDrmFormat = DRM_FORMAT_P010;
250 case PixelFormat::YCRCB_420_SP:
251 *outDrmFormat = DRM_FORMAT_NV21;
253 case PixelFormat::YV12:
254 *outDrmFormat = DRM_FORMAT_YVU420_ANDROID;
260 int convertToBufferUsage(uint64_t grallocUsage, uint64_t* outBufferUsage) {
261 uint64_t bufferUsage = BO_USE_NONE;
263 if ((grallocUsage & BufferUsage::CPU_READ_MASK) ==
264 static_cast<uint64_t>(BufferUsage::CPU_READ_RARELY)) {
265 bufferUsage |= BO_USE_SW_READ_RARELY;
267 if ((grallocUsage & BufferUsage::CPU_READ_MASK) ==
268 static_cast<uint64_t>(BufferUsage::CPU_READ_OFTEN)) {
269 bufferUsage |= BO_USE_SW_READ_OFTEN;
271 if ((grallocUsage & BufferUsage::CPU_WRITE_MASK) ==
272 static_cast<uint64_t>(BufferUsage::CPU_WRITE_RARELY)) {
273 bufferUsage |= BO_USE_SW_WRITE_RARELY;
275 if ((grallocUsage & BufferUsage::CPU_WRITE_MASK) ==
276 static_cast<uint64_t>(BufferUsage::CPU_WRITE_OFTEN)) {
277 bufferUsage |= BO_USE_SW_WRITE_OFTEN;
279 if (grallocUsage & BufferUsage::GPU_TEXTURE) {
280 bufferUsage |= BO_USE_TEXTURE;
282 if (grallocUsage & BufferUsage::GPU_RENDER_TARGET) {
283 bufferUsage |= BO_USE_RENDERING;
285 if (grallocUsage & BufferUsage::COMPOSER_OVERLAY) {
286 /* HWC wants to use display hardware, but can defer to OpenGL. */
287 bufferUsage |= BO_USE_SCANOUT | BO_USE_TEXTURE;
289 if (grallocUsage & BufferUsage::COMPOSER_CLIENT_TARGET) {
290 /* GPU composition target buffer */
291 bufferUsage |= BO_USE_COMPOSER_TARGET;
293 /* Map this flag to linear until real HW protection is available on Android. */
294 if (grallocUsage & BufferUsage::PROTECTED) {
295 bufferUsage |= BO_USE_LINEAR;
297 if (grallocUsage & BufferUsage::COMPOSER_CURSOR) {
298 bufferUsage |= BO_USE_NONE;
300 if (grallocUsage & BufferUsage::VIDEO_ENCODER) {
301 /*HACK: See b/30054495 */
302 bufferUsage |= BO_USE_SW_READ_OFTEN;
304 if (grallocUsage & BufferUsage::CAMERA_OUTPUT) {
305 bufferUsage |= BO_USE_CAMERA_WRITE;
307 if (grallocUsage & BufferUsage::CAMERA_INPUT) {
308 bufferUsage |= BO_USE_CAMERA_READ;
310 if (grallocUsage & BufferUsage::RENDERSCRIPT) {
311 bufferUsage |= BO_USE_RENDERSCRIPT;
313 if (grallocUsage & BufferUsage::VIDEO_DECODER) {
314 bufferUsage |= BO_USE_HW_VIDEO_DECODER;
317 *outBufferUsage = bufferUsage;
321 int convertToCrosDescriptor(const BufferDescriptorInfo& descriptor,
322 struct cros_gralloc_buffer_descriptor* outCrosDescriptor) {
323 outCrosDescriptor->name = descriptor.name;
324 outCrosDescriptor->width = descriptor.width;
325 outCrosDescriptor->height = descriptor.height;
326 outCrosDescriptor->droid_format = static_cast<int32_t>(descriptor.format);
327 outCrosDescriptor->droid_usage = descriptor.usage;
328 outCrosDescriptor->reserved_region_size = descriptor.reservedSize;
329 if (descriptor.layerCount > 1) {
330 drv_log("Failed to convert descriptor. Unsupported layerCount: %d\n",
331 descriptor.layerCount);
334 if (convertToDrmFormat(descriptor.format, &outCrosDescriptor->drm_format)) {
335 std::string pixelFormatString = getPixelFormatString(descriptor.format);
336 drv_log("Failed to convert descriptor. Unsupported format %s\n", pixelFormatString.c_str());
339 if (convertToBufferUsage(descriptor.usage, &outCrosDescriptor->use_flags)) {
340 std::string usageString = getUsageString(descriptor.usage);
341 drv_log("Failed to convert descriptor. Unsupported usage flags %s\n", usageString.c_str());
347 int convertToMapUsage(uint64_t grallocUsage, uint32_t* outMapUsage) {
348 uint32_t mapUsage = BO_MAP_NONE;
350 if (grallocUsage & BufferUsage::CPU_READ_MASK) {
351 mapUsage |= BO_MAP_READ;
353 if (grallocUsage & BufferUsage::CPU_WRITE_MASK) {
354 mapUsage |= BO_MAP_WRITE;
357 *outMapUsage = mapUsage;
361 int convertToFenceFd(const hidl_handle& fenceHandle, int* outFenceFd) {
366 const native_handle_t* nativeHandle = fenceHandle.getNativeHandle();
367 if (nativeHandle && nativeHandle->numFds > 1) {
371 *outFenceFd = (nativeHandle && nativeHandle->numFds == 1) ? nativeHandle->data[0] : -1;
375 int convertToFenceHandle(int fenceFd, hidl_handle* outFenceHandle) {
376 if (!outFenceHandle) {
383 NATIVE_HANDLE_DECLARE_STORAGE(handleStorage, 1, 0);
384 auto fenceHandle = native_handle_init(handleStorage, 1, 0);
385 fenceHandle->data[0] = fenceFd;
387 *outFenceHandle = fenceHandle;
/*
 * Returns the table mapping a DRM fourcc format to the gralloc4 PlaneLayouts
 * of its planes: which components each plane carries, the per-sample
 * increment in bits, and the chroma subsampling factors.
 * The map is heap-allocated on first use and intentionally never freed, so
 * the returned reference stays valid for the life of the process.
 */
const std::unordered_map<uint32_t, std::vector<PlaneLayout>>& GetPlaneLayoutsMap() {
    static const auto* kPlaneLayoutsMap =
            new std::unordered_map<uint32_t, std::vector<PlaneLayout>>({
                    /* Single plane, 32-bit RGBA samples. */
                    {DRM_FORMAT_ABGR8888,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_A,
                     .sampleIncrementInBits = 32,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 10:10:10:2 RGBA packed into 32 bits. */
                    {DRM_FORMAT_ABGR2101010,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_A,
                     .sampleIncrementInBits = 32,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 64-bit (4 x 16-bit float) RGBA samples. */
                    {DRM_FORMAT_ABGR16161616F,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_A,
                     .sampleIncrementInBits = 64,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 32-bit samples with B/G/R/A component order. */
                    {DRM_FORMAT_ARGB8888,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_A,
                     .sampleIncrementInBits = 32,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 16-bit 4:4:4:4 RGBA samples. */
                    {DRM_FORMAT_ARGB4444,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_A,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Two-plane 4:2:0: full-res Y, then interleaved Cb/Cr at half
                     * resolution — presumably DRM_FORMAT_NV12 (map key not shown
                     * in this excerpt; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_Y,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                     {{.type = android::gralloc4::PlaneLayoutComponentType_CB,
                      {.type = android::gralloc4::PlaneLayoutComponentType_CR,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                    /* Two-plane 4:2:0 with Cr before Cb — presumably
                     * DRM_FORMAT_NV21 (map key not shown; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_Y,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                     {{.type = android::gralloc4::PlaneLayoutComponentType_CR,
                      {.type = android::gralloc4::PlaneLayoutComponentType_CB,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                    /* Two-plane 4:2:0 with 16-bit Y samples and 32-bit Cb/Cr
                     * pairs — presumably DRM_FORMAT_P010 (map key not shown;
                     * verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_Y,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                     {{.type = android::gralloc4::PlaneLayoutComponentType_CB,
                      {.type = android::gralloc4::PlaneLayoutComponentType_CR,
                     .sampleIncrementInBits = 32,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                    /* Single plane of 8-bit R samples — presumably
                     * DRM_FORMAT_R8 (map key not shown; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane of 16-bit R samples — presumably
                     * DRM_FORMAT_R16 (map key not shown; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 16-bit RGB samples — presumably
                     * DRM_FORMAT_RGB565 (map key not shown; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                     .sampleIncrementInBits = 16,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 24-bit RGB samples — presumably
                     * DRM_FORMAT_RGB888 (map key not shown; verify). */
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_R,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_B,
                     .sampleIncrementInBits = 24,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Single plane, 32-bit samples, no alpha component. */
                    {DRM_FORMAT_XBGR8888,
                     .components = {{.type = android::gralloc4::PlaneLayoutComponentType_B,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_G,
                                    {.type = android::gralloc4::PlaneLayoutComponentType_R,
                     .sampleIncrementInBits = 32,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                    /* Three-plane 4:2:0: Y, then Cb, then Cr, each 8 bits per
                     * sample (map key not shown in this excerpt; verify). */
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_Y,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_CB,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_CR,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                    /* Android's YV12: three-plane 4:2:0 with Cr before Cb. */
                    {DRM_FORMAT_YVU420_ANDROID,
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_Y,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 1,
                     .verticalSubsampling = 1,
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_CR,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
                     .components = {{.type = android::gralloc4::
                                             PlaneLayoutComponentType_CB,
                     .sampleIncrementInBits = 8,
                     .horizontalSubsampling = 2,
                     .verticalSubsampling = 2,
    return *kPlaneLayoutsMap;
688 int getPlaneLayouts(uint32_t drmFormat, std::vector<PlaneLayout>* outPlaneLayouts) {
689 const auto& planeLayoutsMap = GetPlaneLayoutsMap();
690 const auto it = planeLayoutsMap.find(drmFormat);
691 if (it == planeLayoutsMap.end()) {
692 drv_log("Unknown plane layout for format %d\n", drmFormat);
696 *outPlaneLayouts = it->second;