/*
 * Copyright 2016 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
/*
 * Per-driver backend implementations linked into this library.
 * NOTE(review): the gaps in the original numbering match #ifdef DRV_* guards
 * around the optionally-compiled backends — reconstructed below; confirm the
 * guard macro names against the build files.
 */
#ifdef DRV_AMDGPU
extern struct backend backend_amdgpu;
#endif
extern struct backend backend_cirrus;
extern struct backend backend_evdi;
#ifdef DRV_EXYNOS
extern struct backend backend_exynos;
#endif
extern struct backend backend_gma500;
#ifdef DRV_I915
extern struct backend backend_i915;
#endif
#ifdef DRV_MARVELL
extern struct backend backend_marvell;
#endif
#ifdef DRV_MEDIATEK
extern struct backend backend_mediatek;
#endif
extern struct backend backend_nouveau;
#ifdef DRV_RADEON
extern struct backend backend_radeon;
#endif
#ifdef DRV_ROCKCHIP
extern struct backend backend_rockchip;
#endif
#ifdef DRV_TEGRA
extern struct backend backend_tegra;
#endif
extern struct backend backend_udl;
#ifdef DRV_VC4
extern struct backend backend_vc4;
#endif
extern struct backend backend_vgem;
extern struct backend backend_virtio_gpu;
56 static struct backend *drv_get_backend(int fd)
58 drmVersionPtr drm_version;
61 drm_version = drmGetVersion(fd);
66 struct backend *backend_list[] = {
70 &backend_cirrus, &backend_evdi,
98 &backend_vgem, &backend_virtio_gpu,
101 for (i = 0; i < ARRAY_SIZE(backend_list); i++)
102 if (!strcmp(drm_version->name, backend_list[i]->name)) {
103 drmFreeVersion(drm_version);
104 return backend_list[i];
107 drmFreeVersion(drm_version);
111 struct driver *drv_create(int fd)
116 drv = (struct driver *)calloc(1, sizeof(*drv));
122 drv->backend = drv_get_backend(fd);
127 if (pthread_mutex_init(&drv->driver_lock, NULL))
130 drv->buffer_table = drmHashCreate();
131 if (!drv->buffer_table)
134 drv->map_table = drmHashCreate();
136 goto free_buffer_table;
138 /* Start with a power of 2 number of allocations. */
139 drv->backend->combos.allocations = 2;
140 drv->backend->combos.size = 0;
141 drv->backend->combos.data =
142 calloc(drv->backend->combos.allocations, sizeof(struct combination));
143 if (!drv->backend->combos.data)
146 if (drv->backend->init) {
147 ret = drv->backend->init(drv);
149 free(drv->backend->combos.data);
157 drmHashDestroy(drv->map_table);
159 drmHashDestroy(drv->buffer_table);
161 pthread_mutex_destroy(&drv->driver_lock);
167 void drv_destroy(struct driver *drv)
169 pthread_mutex_lock(&drv->driver_lock);
171 if (drv->backend->close)
172 drv->backend->close(drv);
174 drmHashDestroy(drv->buffer_table);
175 drmHashDestroy(drv->map_table);
177 free(drv->backend->combos.data);
179 pthread_mutex_unlock(&drv->driver_lock);
180 pthread_mutex_destroy(&drv->driver_lock);
185 int drv_get_fd(struct driver *drv)
190 const char *drv_get_name(struct driver *drv)
192 return drv->backend->name;
195 struct combination *drv_get_combination(struct driver *drv, uint32_t format, uint64_t usage)
197 struct combination *curr, *best;
199 if (format == DRM_FORMAT_NONE || usage == BO_USE_NONE)
204 for (i = 0; i < drv->backend->combos.size; i++) {
205 curr = &drv->backend->combos.data[i];
206 if ((format == curr->format) && usage == (curr->usage & usage))
207 if (!best || best->metadata.priority < curr->metadata.priority)
214 struct bo *drv_bo_new(struct driver *drv, uint32_t width, uint32_t height, uint32_t format)
218 bo = (struct bo *)calloc(1, sizeof(*bo));
227 bo->num_planes = drv_num_planes_from_format(format);
229 if (!bo->num_planes) {
237 struct bo *drv_bo_create(struct driver *drv, uint32_t width, uint32_t height, uint32_t format,
244 bo = drv_bo_new(drv, width, height, format);
249 ret = drv->backend->bo_create(bo, width, height, format, flags);
256 pthread_mutex_lock(&drv->driver_lock);
258 for (plane = 0; plane < bo->num_planes; plane++)
259 drv_increment_reference_count(drv, bo, plane);
261 pthread_mutex_unlock(&drv->driver_lock);
266 struct bo *drv_bo_create_with_modifiers(struct driver *drv, uint32_t width, uint32_t height,
267 uint32_t format, const uint64_t *modifiers, uint32_t count)
273 if (!drv->backend->bo_create_with_modifiers) {
278 bo = drv_bo_new(drv, width, height, format);
283 ret = drv->backend->bo_create_with_modifiers(bo, width, height, format, modifiers, count);
290 pthread_mutex_lock(&drv->driver_lock);
292 for (plane = 0; plane < bo->num_planes; plane++)
293 drv_increment_reference_count(drv, bo, plane);
295 pthread_mutex_unlock(&drv->driver_lock);
300 void drv_bo_destroy(struct bo *bo)
304 struct driver *drv = bo->drv;
306 pthread_mutex_lock(&drv->driver_lock);
308 for (plane = 0; plane < bo->num_planes; plane++)
309 drv_decrement_reference_count(drv, bo, plane);
311 for (plane = 0; plane < bo->num_planes; plane++)
312 total += drv_get_reference_count(drv, bo, plane);
314 pthread_mutex_unlock(&drv->driver_lock);
317 bo->drv->backend->bo_destroy(bo);
322 struct bo *drv_bo_import(struct driver *drv, struct drv_import_fd_data *data)
328 bo = drv_bo_new(drv, data->width, data->height, data->format);
333 ret = drv->backend->bo_import(bo, data);
339 for (plane = 0; plane < bo->num_planes; plane++) {
340 bo->strides[plane] = data->strides[plane];
341 bo->offsets[plane] = data->offsets[plane];
342 bo->sizes[plane] = data->sizes[plane];
343 bo->format_modifiers[plane] = data->format_modifiers[plane];
344 bo->total_size += data->sizes[plane];
350 void *drv_bo_map(struct bo *bo, uint32_t x, uint32_t y, uint32_t width, uint32_t height,
351 uint32_t flags, struct map_info **map_data, size_t plane)
356 struct map_info *data;
361 assert(x + width <= drv_bo_get_width(bo));
362 assert(y + height <= drv_bo_get_height(bo));
363 assert(BO_TRANSFER_READ_WRITE & flags);
365 pthread_mutex_lock(&bo->drv->driver_lock);
367 if (!drmHashLookup(bo->drv->map_table, bo->handles[plane].u32, &ptr)) {
368 data = (struct map_info *)ptr;
373 data = calloc(1, sizeof(*data));
374 prot = BO_TRANSFER_WRITE & flags ? PROT_WRITE | PROT_READ : PROT_READ;
375 addr = bo->drv->backend->bo_map(bo, data, plane, prot);
376 if (addr == MAP_FAILED) {
379 pthread_mutex_unlock(&bo->drv->driver_lock);
385 data->handle = bo->handles[plane].u32;
386 drmHashInsert(bo->drv->map_table, bo->handles[plane].u32, (void *)data);
390 offset = drv_bo_get_plane_stride(bo, plane) * y;
391 offset += drv_stride_from_format(bo->format, x, plane);
392 addr = (uint8_t *)data->addr;
393 addr += drv_bo_get_plane_offset(bo, plane) + offset;
394 pthread_mutex_unlock(&bo->drv->driver_lock);
399 int drv_bo_unmap(struct bo *bo, struct map_info *data)
404 assert(data->refcount >= 0);
406 pthread_mutex_lock(&bo->drv->driver_lock);
408 if (!--data->refcount) {
409 if (bo->drv->backend->bo_unmap)
410 ret = bo->drv->backend->bo_unmap(bo, data);
412 ret = munmap(data->addr, data->length);
413 drmHashDelete(bo->drv->map_table, data->handle);
417 pthread_mutex_unlock(&bo->drv->driver_lock);
422 uint32_t drv_bo_get_width(struct bo *bo)
427 uint32_t drv_bo_get_height(struct bo *bo)
432 uint32_t drv_bo_get_stride_or_tiling(struct bo *bo)
434 return bo->tiling ? bo->tiling : drv_bo_get_plane_stride(bo, 0);
437 size_t drv_bo_get_num_planes(struct bo *bo)
439 return bo->num_planes;
442 union bo_handle drv_bo_get_plane_handle(struct bo *bo, size_t plane)
444 return bo->handles[plane];
448 #define DRM_RDWR O_RDWR
451 int drv_bo_get_plane_fd(struct bo *bo, size_t plane)
455 assert(plane < bo->num_planes);
457 ret = drmPrimeHandleToFD(bo->drv->fd, bo->handles[plane].u32, DRM_CLOEXEC | DRM_RDWR, &fd);
459 return (ret) ? ret : fd;
462 uint32_t drv_bo_get_plane_offset(struct bo *bo, size_t plane)
464 assert(plane < bo->num_planes);
465 return bo->offsets[plane];
468 uint32_t drv_bo_get_plane_size(struct bo *bo, size_t plane)
470 assert(plane < bo->num_planes);
471 return bo->sizes[plane];
474 uint32_t drv_bo_get_plane_stride(struct bo *bo, size_t plane)
476 assert(plane < bo->num_planes);
477 return bo->strides[plane];
480 uint64_t drv_bo_get_plane_format_modifier(struct bo *bo, size_t plane)
482 assert(plane < bo->num_planes);
483 return bo->format_modifiers[plane];
486 uint32_t drv_bo_get_format(struct bo *bo)
491 uint32_t drv_resolve_format(struct driver *drv, uint32_t format, uint64_t usage)
493 if (drv->backend->resolve_format)
494 return drv->backend->resolve_format(format, usage);
499 size_t drv_num_planes_from_format(uint32_t format)
502 case DRM_FORMAT_ABGR1555:
503 case DRM_FORMAT_ABGR2101010:
504 case DRM_FORMAT_ABGR4444:
505 case DRM_FORMAT_ABGR8888:
506 case DRM_FORMAT_ARGB1555:
507 case DRM_FORMAT_ARGB2101010:
508 case DRM_FORMAT_ARGB4444:
509 case DRM_FORMAT_ARGB8888:
510 case DRM_FORMAT_AYUV:
511 case DRM_FORMAT_BGR233:
512 case DRM_FORMAT_BGR565:
513 case DRM_FORMAT_BGR888:
514 case DRM_FORMAT_BGRA1010102:
515 case DRM_FORMAT_BGRA4444:
516 case DRM_FORMAT_BGRA5551:
517 case DRM_FORMAT_BGRA8888:
518 case DRM_FORMAT_BGRX1010102:
519 case DRM_FORMAT_BGRX4444:
520 case DRM_FORMAT_BGRX5551:
521 case DRM_FORMAT_BGRX8888:
523 case DRM_FORMAT_GR88:
525 case DRM_FORMAT_RG88:
526 case DRM_FORMAT_RGB332:
527 case DRM_FORMAT_RGB565:
528 case DRM_FORMAT_RGB888:
529 case DRM_FORMAT_RGBA1010102:
530 case DRM_FORMAT_RGBA4444:
531 case DRM_FORMAT_RGBA5551:
532 case DRM_FORMAT_RGBA8888:
533 case DRM_FORMAT_RGBX1010102:
534 case DRM_FORMAT_RGBX4444:
535 case DRM_FORMAT_RGBX5551:
536 case DRM_FORMAT_RGBX8888:
537 case DRM_FORMAT_UYVY:
538 case DRM_FORMAT_VYUY:
539 case DRM_FORMAT_XBGR1555:
540 case DRM_FORMAT_XBGR2101010:
541 case DRM_FORMAT_XBGR4444:
542 case DRM_FORMAT_XBGR8888:
543 case DRM_FORMAT_XRGB1555:
544 case DRM_FORMAT_XRGB2101010:
545 case DRM_FORMAT_XRGB4444:
546 case DRM_FORMAT_XRGB8888:
547 case DRM_FORMAT_YUYV:
548 case DRM_FORMAT_YVYU:
550 case DRM_FORMAT_NV12:
551 case DRM_FORMAT_NV21:
553 case DRM_FORMAT_YVU420:
554 case DRM_FORMAT_YVU420_ANDROID:
558 fprintf(stderr, "drv: UNKNOWN FORMAT %d\n", format);
562 uint32_t drv_size_from_format(uint32_t format, uint32_t stride, uint32_t height, size_t plane)
564 assert(plane < drv_num_planes_from_format(format));
565 uint32_t vertical_subsampling;
568 case DRM_FORMAT_NV12:
569 case DRM_FORMAT_YVU420:
570 case DRM_FORMAT_YVU420_ANDROID:
571 vertical_subsampling = (plane == 0) ? 1 : 2;
574 vertical_subsampling = 1;
577 return stride * DIV_ROUND_UP(height, vertical_subsampling);
580 uint32_t drv_num_buffers_per_bo(struct bo *bo)
585 for (plane = 0; plane < bo->num_planes; plane++) {
586 for (p = 0; p < plane; p++)
587 if (bo->handles[p].u32 == bo->handles[plane].u32)