/*
 * Copyright 2016 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
14 #include <sys/types.h>
21 #include "i915_private.h"
24 extern struct backend backend_amdgpu;
26 extern struct backend backend_cirrus;
27 extern struct backend backend_evdi;
29 extern struct backend backend_exynos;
31 extern struct backend backend_gma500;
33 extern struct backend backend_i915;
36 extern struct backend backend_marvell;
39 extern struct backend backend_mediatek;
41 extern struct backend backend_nouveau;
43 extern struct backend backend_radeon;
46 extern struct backend backend_rockchip;
49 extern struct backend backend_tegra;
51 extern struct backend backend_udl;
53 extern struct backend backend_vc4;
55 extern struct backend backend_vgem;
56 extern struct backend backend_virtio_gpu;
58 static struct backend *drv_get_backend(int fd)
60 drmVersionPtr drm_version;
63 drm_version = drmGetVersion(fd);
68 struct backend *backend_list[] = {
72 &backend_cirrus, &backend_evdi,
100 &backend_vgem, &backend_virtio_gpu,
103 for (i = 0; i < ARRAY_SIZE(backend_list); i++)
104 if (!strcmp(drm_version->name, backend_list[i]->name)) {
105 drmFreeVersion(drm_version);
106 return backend_list[i];
109 drmFreeVersion(drm_version);
113 struct driver *drv_create(int fd)
118 drv = (struct driver *)calloc(1, sizeof(*drv));
124 drv->backend = drv_get_backend(fd);
129 drv->buffer_table = drmHashCreate();
130 if (!drv->buffer_table)
133 drv->map_table = drmHashCreate();
135 goto free_buffer_table;
137 /* Start with a power of 2 number of allocations. */
138 drv->combos.allocations = 2;
139 drv->combos.size = 0;
141 drv->combos.data = calloc(drv->combos.allocations, sizeof(struct combination));
142 if (!drv->combos.data)
145 if (drv->backend->init) {
146 ret = drv->backend->init(drv);
148 free(drv->combos.data);
153 ATOMIC_VAR_INIT(drv->driver_lock);
158 drmHashDestroy(drv->map_table);
160 drmHashDestroy(drv->buffer_table);
166 void drv_destroy(struct driver *drv)
168 ATOMIC_LOCK(&drv->driver_lock);
170 if (drv->backend->close)
171 drv->backend->close(drv);
173 drmHashDestroy(drv->buffer_table);
174 drmHashDestroy(drv->map_table);
176 free(drv->combos.data);
178 ATOMIC_UNLOCK(&drv->driver_lock);
183 int drv_get_fd(struct driver *drv)
188 const char *drv_get_name(struct driver *drv)
190 return drv->backend->name;
193 struct combination *drv_get_combination(struct driver *drv, uint32_t format, uint64_t use_flags)
195 struct combination *curr, *best;
197 if (format == DRM_FORMAT_NONE || use_flags == BO_USE_NONE)
202 for (i = 0; i < drv->combos.size; i++) {
203 curr = &drv->combos.data[i];
204 if ((format == curr->format) && use_flags == (curr->use_flags & use_flags))
205 if (!best || best->metadata.priority < curr->metadata.priority)
212 struct bo *drv_bo_new(struct driver *drv, uint32_t width, uint32_t height, uint32_t format,
217 bo = (struct bo *)calloc(1, sizeof(*bo));
226 bo->use_flags = use_flags;
227 bo->num_planes = drv_num_planes_from_format(format);
229 if (!bo->num_planes) {
237 struct bo *drv_bo_create(struct driver *drv, uint32_t width, uint32_t height, uint32_t format,
244 bo = drv_bo_new(drv, width, height, format, use_flags);
249 ret = drv->backend->bo_create(bo, width, height, format, use_flags);
256 ATOMIC_LOCK(&drv->driver_lock);
258 for (plane = 0; plane < bo->num_planes; plane++) {
260 assert(bo->offsets[plane] >= bo->offsets[plane - 1]);
262 drv_increment_reference_count(drv, bo, plane);
265 ATOMIC_UNLOCK(&drv->driver_lock);
270 struct bo *drv_bo_create_with_modifiers(struct driver *drv, uint32_t width, uint32_t height,
271 uint32_t format, const uint64_t *modifiers, uint32_t count)
277 if (!drv->backend->bo_create_with_modifiers) {
282 bo = drv_bo_new(drv, width, height, format, BO_USE_NONE);
287 ret = drv->backend->bo_create_with_modifiers(bo, width, height, format, modifiers, count);
294 ATOMIC_LOCK(&drv->driver_lock);
296 for (plane = 0; plane < bo->num_planes; plane++) {
298 assert(bo->offsets[plane] >= bo->offsets[plane - 1]);
300 drv_increment_reference_count(drv, bo, plane);
303 ATOMIC_UNLOCK(&drv->driver_lock);
308 void drv_bo_destroy(struct bo *bo)
312 struct driver *drv = bo->drv;
314 ATOMIC_LOCK(&drv->driver_lock);
316 for (plane = 0; plane < bo->num_planes; plane++)
317 drv_decrement_reference_count(drv, bo, plane);
319 for (plane = 0; plane < bo->num_planes; plane++)
320 total += drv_get_reference_count(drv, bo, plane);
322 ATOMIC_UNLOCK(&drv->driver_lock);
325 assert(drv_map_info_destroy(bo) == 0);
326 bo->drv->backend->bo_destroy(bo);
332 struct bo *drv_bo_import(struct driver *drv, struct drv_import_fd_data *data)
339 bo = drv_bo_new(drv, data->width, data->height, data->format, data->use_flags);
344 ret = drv->backend->bo_import(bo, data);
350 for (plane = 0; plane < bo->num_planes; plane++) {
351 bo->strides[plane] = data->strides[plane];
352 bo->offsets[plane] = data->offsets[plane];
353 bo->format_modifiers[plane] = data->format_modifiers[plane];
355 seek_end = lseek(data->fds[plane], 0, SEEK_END);
356 if (seek_end == (off_t)(-1)) {
357 fprintf(stderr, "drv: lseek() failed with %s\n", strerror(errno));
361 lseek(data->fds[plane], 0, SEEK_SET);
362 if (plane == bo->num_planes - 1 || data->offsets[plane + 1] == 0)
363 bo->sizes[plane] = seek_end - data->offsets[plane];
365 bo->sizes[plane] = data->offsets[plane + 1] - data->offsets[plane];
367 if ((int64_t)bo->offsets[plane] + bo->sizes[plane] > seek_end) {
368 fprintf(stderr, "drv: buffer size is too large.\n");
372 bo->total_size += bo->sizes[plane];
382 void *drv_bo_map(struct bo *bo, uint32_t x, uint32_t y, uint32_t width, uint32_t height,
383 uint32_t map_flags, struct map_info **map_data, size_t plane)
388 struct map_info *data;
392 assert(x + width <= drv_bo_get_width(bo));
393 assert(y + height <= drv_bo_get_height(bo));
394 assert(BO_MAP_READ_WRITE & map_flags);
395 /* No CPU access for protected buffers. */
396 assert(!(bo->use_flags & BO_USE_PROTECTED));
398 ATOMIC_LOCK(&bo->drv->driver_lock);
400 if (!drmHashLookup(bo->drv->map_table, bo->handles[plane].u32, &ptr)) {
401 data = (struct map_info *)ptr;
402 /* TODO(gsingh): support mapping same buffer with different flags. */
403 assert(data->map_flags == map_flags);
408 data = calloc(1, sizeof(*data));
409 addr = bo->drv->backend->bo_map(bo, data, plane, map_flags);
410 if (addr == MAP_FAILED) {
413 ATOMIC_UNLOCK(&bo->drv->driver_lock);
419 data->handle = bo->handles[plane].u32;
420 data->map_flags = map_flags;
421 drmHashInsert(bo->drv->map_table, bo->handles[plane].u32, (void *)data);
424 drv_bo_invalidate(bo, data);
426 offset = drv_bo_get_plane_stride(bo, plane) * y;
427 offset += drv_stride_from_format(bo->format, x, plane);
428 addr = (uint8_t *)data->addr;
429 addr += drv_bo_get_plane_offset(bo, plane) + offset;
430 ATOMIC_UNLOCK(&bo->drv->driver_lock);
435 int drv_bo_unmap(struct bo *bo, struct map_info *data)
437 int ret = drv_bo_flush(bo, data);
441 ATOMIC_LOCK(&bo->drv->driver_lock);
443 if (!--data->refcount) {
444 ret = bo->drv->backend->bo_unmap(bo, data);
445 drmHashDelete(bo->drv->map_table, data->handle);
449 ATOMIC_UNLOCK(&bo->drv->driver_lock);;
454 int drv_bo_invalidate(struct bo *bo, struct map_info *data)
458 assert(data->refcount >= 0);
460 if (bo->drv->backend->bo_invalidate)
461 ret = bo->drv->backend->bo_invalidate(bo, data);
466 int drv_bo_flush(struct bo *bo, struct map_info *data)
470 assert(data->refcount >= 0);
471 assert(!(bo->use_flags & BO_USE_PROTECTED));
473 if (bo->drv->backend->bo_flush)
474 ret = bo->drv->backend->bo_flush(bo, data);
479 uint32_t drv_bo_get_width(struct bo *bo)
484 uint32_t drv_bo_get_height(struct bo *bo)
489 uint32_t drv_bo_get_stride_or_tiling(struct bo *bo)
491 return bo->tiling ? bo->tiling : drv_bo_get_plane_stride(bo, 0);
494 size_t drv_bo_get_num_planes(struct bo *bo)
496 return bo->num_planes;
499 union bo_handle drv_bo_get_plane_handle(struct bo *bo, size_t plane)
501 return bo->handles[plane];
505 #define DRM_RDWR O_RDWR
508 int drv_bo_get_plane_fd(struct bo *bo, size_t plane)
512 assert(plane < bo->num_planes);
514 ret = drmPrimeHandleToFD(bo->drv->fd, bo->handles[plane].u32, DRM_CLOEXEC | DRM_RDWR, &fd);
516 return (ret) ? ret : fd;
519 uint32_t drv_bo_get_plane_offset(struct bo *bo, size_t plane)
521 assert(plane < bo->num_planes);
522 return bo->offsets[plane];
525 uint32_t drv_bo_get_plane_size(struct bo *bo, size_t plane)
527 assert(plane < bo->num_planes);
528 return bo->sizes[plane];
531 uint32_t drv_bo_get_plane_stride(struct bo *bo, size_t plane)
533 assert(plane < bo->num_planes);
534 return bo->strides[plane];
537 uint64_t drv_bo_get_plane_format_modifier(struct bo *bo, size_t plane)
539 assert(plane < bo->num_planes);
540 return bo->format_modifiers[plane];
543 uint32_t drv_bo_get_format(struct bo *bo)
548 uint32_t drv_resolve_format(struct driver *drv, uint32_t format, uint64_t use_flags)
550 if (drv->backend->resolve_format)
551 return drv->backend->resolve_format(format, use_flags);
556 size_t drv_num_planes_from_format(uint32_t format)
559 case DRM_FORMAT_ABGR1555:
560 case DRM_FORMAT_ABGR2101010:
561 case DRM_FORMAT_ABGR4444:
562 case DRM_FORMAT_ABGR8888:
563 case DRM_FORMAT_ARGB1555:
564 case DRM_FORMAT_ARGB2101010:
565 case DRM_FORMAT_ARGB4444:
566 case DRM_FORMAT_ARGB8888:
567 case DRM_FORMAT_AYUV:
568 case DRM_FORMAT_BGR233:
569 case DRM_FORMAT_BGR565:
570 case DRM_FORMAT_BGR888:
571 case DRM_FORMAT_BGRA1010102:
572 case DRM_FORMAT_BGRA4444:
573 case DRM_FORMAT_BGRA5551:
574 case DRM_FORMAT_BGRA8888:
575 case DRM_FORMAT_BGRX1010102:
576 case DRM_FORMAT_BGRX4444:
577 case DRM_FORMAT_BGRX5551:
578 case DRM_FORMAT_BGRX8888:
580 case DRM_FORMAT_GR88:
582 case DRM_FORMAT_RG88:
583 case DRM_FORMAT_RGB332:
584 case DRM_FORMAT_RGB565:
585 case DRM_FORMAT_RGB888:
586 case DRM_FORMAT_RGBA1010102:
587 case DRM_FORMAT_RGBA4444:
588 case DRM_FORMAT_RGBA5551:
589 case DRM_FORMAT_RGBA8888:
590 case DRM_FORMAT_RGBX1010102:
591 case DRM_FORMAT_RGBX4444:
592 case DRM_FORMAT_RGBX5551:
593 case DRM_FORMAT_RGBX8888:
594 case DRM_FORMAT_UYVY:
595 case DRM_FORMAT_VYUY:
596 case DRM_FORMAT_XBGR1555:
597 case DRM_FORMAT_XBGR2101010:
598 case DRM_FORMAT_XBGR4444:
599 case DRM_FORMAT_XBGR8888:
600 case DRM_FORMAT_XRGB1555:
601 case DRM_FORMAT_XRGB2101010:
602 case DRM_FORMAT_XRGB4444:
603 case DRM_FORMAT_XRGB8888:
604 case DRM_FORMAT_YUYV:
605 case DRM_FORMAT_YVYU:
607 case DRM_FORMAT_NV12:
608 case DRM_FORMAT_NV21:
610 case DRM_FORMAT_YVU420:
611 case DRM_FORMAT_YVU420_ANDROID:
615 return i915_private_num_planes_from_format(format);
618 uint32_t drv_num_buffers_per_bo(struct bo *bo)
623 for (plane = 0; plane < bo->num_planes; plane++) {
624 for (p = 0; p < plane; p++)
625 if (bo->handles[p].u32 == bo->handles[plane].u32)