/*
 * Copyright 2016 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
/*
 * Backends implemented elsewhere in the project; each one is selected at
 * runtime by matching the DRM driver name (see drv_get_backend()).
 * NOTE(review): upstream wraps several of these in DRV_* #ifdef guards so
 * unused backends are compiled out -- confirm against the build flags.
 */
extern struct backend backend_amdgpu;
extern struct backend backend_cirrus;
extern struct backend backend_evdi;
extern struct backend backend_exynos;
extern struct backend backend_gma500;
extern struct backend backend_i915;
extern struct backend backend_marvell;
extern struct backend backend_mediatek;
extern struct backend backend_nouveau;
extern struct backend backend_rockchip;
extern struct backend backend_tegra;
extern struct backend backend_udl;
extern struct backend backend_vgem;
extern struct backend backend_virtio_gpu;
50 static struct backend *drv_get_backend(int fd)
52 drmVersionPtr drm_version;
55 drm_version = drmGetVersion(fd);
60 struct backend *backend_list[] = {
91 for(i = 0; i < ARRAY_SIZE(backend_list); i++)
92 if (!strcmp(drm_version->name, backend_list[i]->name)) {
93 drmFreeVersion(drm_version);
94 return backend_list[i];
97 drmFreeVersion(drm_version);
101 struct driver *drv_create(int fd)
106 drv = (struct driver *) calloc(1, sizeof(*drv));
112 drv->backend = drv_get_backend(fd);
117 if (pthread_mutex_init(&drv->driver_lock, NULL))
120 drv->buffer_table = drmHashCreate();
121 if (!drv->buffer_table)
124 drv->map_table = drmHashCreate();
126 goto free_buffer_table;
128 LIST_INITHEAD(&drv->backend->combinations);
130 if (drv->backend->init) {
131 ret = drv->backend->init(drv);
139 drmHashDestroy(drv->map_table);
141 drmHashDestroy(drv->buffer_table);
143 pthread_mutex_destroy(&drv->driver_lock);
149 void drv_destroy(struct driver *drv)
151 pthread_mutex_lock(&drv->driver_lock);
153 if (drv->backend->close)
154 drv->backend->close(drv);
156 drmHashDestroy(drv->buffer_table);
157 drmHashDestroy(drv->map_table);
159 list_for_each_entry_safe(struct combination_list_element, elem,
160 &drv->backend->combinations, link) {
161 LIST_DEL(&elem->link);
165 pthread_mutex_unlock(&drv->driver_lock);
166 pthread_mutex_destroy(&drv->driver_lock);
171 int drv_get_fd(struct driver *drv)
177 drv_get_name(struct driver *drv)
179 return drv->backend->name;
182 int drv_is_combination_supported(struct driver *drv, uint32_t format,
183 uint64_t usage, uint64_t modifier)
186 if (format == DRM_FORMAT_NONE || usage == BO_USE_NONE)
189 list_for_each_entry(struct combination_list_element, elem,
190 &drv->backend->combinations, link) {
191 if (format == elem->combination.format &&
192 usage == (elem->combination.usage & usage) &&
193 modifier == elem->combination.modifier)
200 struct bo *drv_bo_new(struct driver *drv, uint32_t width, uint32_t height,
205 bo = (struct bo *) calloc(1, sizeof(*bo));
214 bo->num_planes = drv_num_planes_from_format(format);
216 if (!bo->num_planes) {
224 struct bo *drv_bo_create(struct driver *drv, uint32_t width, uint32_t height,
225 uint32_t format, uint64_t flags)
231 bo = drv_bo_new(drv, width, height, format);
236 ret = drv->backend->bo_create(bo, width, height, format, flags);
243 pthread_mutex_lock(&drv->driver_lock);
245 for (plane = 0; plane < bo->num_planes; plane++)
246 drv_increment_reference_count(drv, bo, plane);
248 pthread_mutex_unlock(&drv->driver_lock);
253 struct bo *drv_bo_create_with_modifiers(struct driver *drv,
254 uint32_t width, uint32_t height,
256 const uint64_t *modifiers, uint32_t count)
262 if (!drv->backend->bo_create_with_modifiers) {
267 bo = drv_bo_new(drv, width, height, format);
272 ret = drv->backend->bo_create_with_modifiers(bo, width, height,
273 format, modifiers, count);
280 pthread_mutex_lock(&drv->driver_lock);
282 for (plane = 0; plane < bo->num_planes; plane++)
283 drv_increment_reference_count(drv, bo, plane);
285 pthread_mutex_unlock(&drv->driver_lock);
291 void drv_bo_destroy(struct bo *bo)
295 struct driver *drv = bo->drv;
297 pthread_mutex_lock(&drv->driver_lock);
299 for (plane = 0; plane < bo->num_planes; plane++)
300 drv_decrement_reference_count(drv, bo, plane);
302 for (plane = 0; plane < bo->num_planes; plane++)
303 total += drv_get_reference_count(drv, bo, plane);
305 pthread_mutex_unlock(&drv->driver_lock);
308 bo->drv->backend->bo_destroy(bo);
313 struct bo *drv_bo_import(struct driver *drv, struct drv_import_fd_data *data)
319 bo = drv_bo_new(drv, data->width, data->height, data->format);
324 ret = drv->backend->bo_import(bo, data);
330 for (plane = 0; plane < bo->num_planes; plane++) {
331 bo->strides[plane] = data->strides[plane];
332 bo->offsets[plane] = data->offsets[plane];
333 bo->sizes[plane] = data->sizes[plane];
334 bo->format_modifiers[plane] = data->format_modifiers[plane];
335 bo->total_size += data->sizes[plane];
341 void *drv_bo_map(struct bo *bo, uint32_t x, uint32_t y, uint32_t width,
342 uint32_t height, uint32_t flags, void **map_data, size_t plane)
347 struct map_info *data;
351 assert(x + width <= drv_bo_get_width(bo));
352 assert(y + height <= drv_bo_get_height(bo));
354 pthread_mutex_lock(&bo->drv->driver_lock);
356 if (!drmHashLookup(bo->drv->map_table, bo->handles[plane].u32, &ptr)) {
357 data = (struct map_info *) ptr;
362 data = calloc(1, sizeof(*data));
363 addr = bo->drv->backend->bo_map(bo, data, plane);
364 if (addr == MAP_FAILED) {
367 pthread_mutex_unlock(&bo->drv->driver_lock);
373 data->handle = bo->handles[plane].u32;
374 drmHashInsert(bo->drv->buffer_table, bo->handles[plane].u32,
378 *map_data = (void *) data;
379 offset = drv_bo_get_plane_stride(bo, plane) * y;
380 offset += drv_stride_from_format(bo->format, x, plane);
381 addr = (uint8_t *) data->addr;
382 addr += drv_bo_get_plane_offset(bo, plane) + offset;
383 pthread_mutex_unlock(&bo->drv->driver_lock);
385 return (void *) addr;
388 int drv_bo_unmap(struct bo *bo, void *map_data)
390 struct map_info *data = map_data;
394 assert(data->refcount >= 0);
396 pthread_mutex_lock(&bo->drv->driver_lock);
398 if (!--data->refcount) {
399 if (bo->drv->backend->bo_unmap)
400 ret = bo->drv->backend->bo_unmap(bo, data);
402 ret = munmap(data->addr, data->length);
403 drmHashDelete(bo->drv->map_table, data->handle);
407 pthread_mutex_unlock(&bo->drv->driver_lock);
412 uint32_t drv_bo_get_width(struct bo *bo)
417 uint32_t drv_bo_get_height(struct bo *bo)
422 uint32_t drv_bo_get_stride_or_tiling(struct bo *bo)
424 return bo->tiling ? bo->tiling : drv_bo_get_plane_stride(bo, 0);
427 size_t drv_bo_get_num_planes(struct bo *bo)
429 return bo->num_planes;
432 union bo_handle drv_bo_get_plane_handle(struct bo *bo, size_t plane)
434 return bo->handles[plane];
438 #define DRM_RDWR O_RDWR
441 int drv_bo_get_plane_fd(struct bo *bo, size_t plane)
445 assert(plane < bo->num_planes);
447 ret = drmPrimeHandleToFD(bo->drv->fd, bo->handles[plane].u32,
448 DRM_CLOEXEC | DRM_RDWR, &fd);
450 return (ret) ? ret : fd;
454 uint32_t drv_bo_get_plane_offset(struct bo *bo, size_t plane)
456 assert(plane < bo->num_planes);
457 return bo->offsets[plane];
460 uint32_t drv_bo_get_plane_size(struct bo *bo, size_t plane)
462 assert(plane < bo->num_planes);
463 return bo->sizes[plane];
466 uint32_t drv_bo_get_plane_stride(struct bo *bo, size_t plane)
468 assert(plane < bo->num_planes);
469 return bo->strides[plane];
472 uint64_t drv_bo_get_plane_format_modifier(struct bo *bo, size_t plane)
474 assert(plane < bo->num_planes);
475 return bo->format_modifiers[plane];
478 uint32_t drv_bo_get_format(struct bo *bo)
483 uint32_t drv_resolve_format(struct driver *drv, uint32_t format)
485 if (drv->backend->resolve_format)
486 return drv->backend->resolve_format(format);
492 * This function returns the stride for a given format, width and plane.
494 int drv_stride_from_format(uint32_t format, uint32_t width, size_t plane)
496 int stride = width * DIV_ROUND_UP(drv_bpp_from_format(format, plane),
500 * Only downsample for certain multiplanar formats which have horizontal
501 * subsampling for chroma planes. Only formats supported by our drivers
502 * are listed here -- add more as needed.
506 case DRM_FORMAT_NV12:
507 case DRM_FORMAT_YVU420:
516 size_t drv_num_planes_from_format(uint32_t format)
519 case DRM_FORMAT_ABGR1555:
520 case DRM_FORMAT_ABGR2101010:
521 case DRM_FORMAT_ABGR4444:
522 case DRM_FORMAT_ABGR8888:
523 case DRM_FORMAT_ARGB1555:
524 case DRM_FORMAT_ARGB2101010:
525 case DRM_FORMAT_ARGB4444:
526 case DRM_FORMAT_ARGB8888:
527 case DRM_FORMAT_AYUV:
528 case DRM_FORMAT_BGR233:
529 case DRM_FORMAT_BGR565:
530 case DRM_FORMAT_BGR888:
531 case DRM_FORMAT_BGRA1010102:
532 case DRM_FORMAT_BGRA4444:
533 case DRM_FORMAT_BGRA5551:
534 case DRM_FORMAT_BGRA8888:
535 case DRM_FORMAT_BGRX1010102:
536 case DRM_FORMAT_BGRX4444:
537 case DRM_FORMAT_BGRX5551:
538 case DRM_FORMAT_BGRX8888:
540 case DRM_FORMAT_GR88:
542 case DRM_FORMAT_RG88:
543 case DRM_FORMAT_RGB332:
544 case DRM_FORMAT_RGB565:
545 case DRM_FORMAT_RGB888:
546 case DRM_FORMAT_RGBA1010102:
547 case DRM_FORMAT_RGBA4444:
548 case DRM_FORMAT_RGBA5551:
549 case DRM_FORMAT_RGBA8888:
550 case DRM_FORMAT_RGBX1010102:
551 case DRM_FORMAT_RGBX4444:
552 case DRM_FORMAT_RGBX5551:
553 case DRM_FORMAT_RGBX8888:
554 case DRM_FORMAT_UYVY:
555 case DRM_FORMAT_VYUY:
556 case DRM_FORMAT_XBGR1555:
557 case DRM_FORMAT_XBGR2101010:
558 case DRM_FORMAT_XBGR4444:
559 case DRM_FORMAT_XBGR8888:
560 case DRM_FORMAT_XRGB1555:
561 case DRM_FORMAT_XRGB2101010:
562 case DRM_FORMAT_XRGB4444:
563 case DRM_FORMAT_XRGB8888:
564 case DRM_FORMAT_YUYV:
565 case DRM_FORMAT_YVYU:
567 case DRM_FORMAT_NV12:
569 case DRM_FORMAT_YVU420:
573 fprintf(stderr, "drv: UNKNOWN FORMAT %d\n", format);
577 uint32_t drv_size_from_format(uint32_t format, uint32_t stride,
578 uint32_t height, size_t plane)
580 assert(plane < drv_num_planes_from_format(format));
581 uint32_t vertical_subsampling;
584 case DRM_FORMAT_NV12:
585 case DRM_FORMAT_YVU420:
586 vertical_subsampling = (plane == 0) ? 1 : 2;
589 vertical_subsampling = 1;
592 return stride * DIV_ROUND_UP(height, vertical_subsampling);
595 uint32_t drv_num_buffers_per_bo(struct bo *bo)
600 for (plane = 0; plane < bo->num_planes; plane++) {
601 for (p = 0; p < plane; p++)
602 if (bo->handles[p].u32 == bo->handles[plane].u32)