3 #include "radeon_drm.h"
4 #include "radeon_bo_gem.h"
5 #include "radeon_cs_gem.h"
6 #include "radeon_buffer.h"
8 #include "util/u_inlines.h"
9 #include "util/u_memory.h"
10 #include "util/u_simple_list.h"
11 #include "pipebuffer/pb_buffer.h"
12 #include "pipebuffer/pb_bufmgr.h"
14 #include "radeon_winsys.h"
/* Forward declaration; the full definition appears further down in this
 * file (buffers need a back-pointer to their manager). */
struct radeon_drm_bufmgr;
17 struct radeon_drm_buffer
{
18 struct pb_buffer base
;
19 struct radeon_drm_bufmgr
*mgr
;
23 /* The CS associated with the last buffer_map. */
24 struct radeon_libdrm_cs
*cs
;
29 struct radeon_drm_buffer
*next
, *prev
;
/* Buffer vtable, defined later in this file; used by the cast helper
 * below to verify a pb_buffer really is a radeon_drm_buffer. */
extern const struct pb_vtbl radeon_drm_buffer_vtbl;
35 static INLINE
struct radeon_drm_buffer
*
36 radeon_drm_buffer(struct pb_buffer
*buf
)
39 assert(buf
->vtbl
== &radeon_drm_buffer_vtbl
);
40 return (struct radeon_drm_buffer
*)buf
;
43 struct radeon_drm_bufmgr
{
44 struct pb_manager base
;
45 struct radeon_libdrm_winsys
*rws
;
46 struct radeon_drm_buffer buffer_map_list
;
49 static INLINE
struct radeon_drm_bufmgr
*
50 radeon_drm_bufmgr(struct pb_manager
*mgr
)
53 return (struct radeon_drm_bufmgr
*)mgr
;
57 radeon_drm_buffer_destroy(struct pb_buffer
*_buf
)
59 struct radeon_drm_buffer
*buf
= radeon_drm_buffer(_buf
);
61 if (buf
->bo
->ptr
!= NULL
) {
62 remove_from_list(buf
);
63 radeon_bo_unmap(buf
->bo
);
66 radeon_bo_unref(buf
->bo
);
71 static unsigned get_pb_usage_from_transfer_flags(enum pipe_transfer_usage usage
)
75 if (usage
& PIPE_TRANSFER_READ
)
76 res
|= PB_USAGE_CPU_READ
;
78 if (usage
& PIPE_TRANSFER_WRITE
)
79 res
|= PB_USAGE_CPU_WRITE
;
81 if (usage
& PIPE_TRANSFER_DONTBLOCK
)
82 res
|= PB_USAGE_DONTBLOCK
;
84 if (usage
& PIPE_TRANSFER_UNSYNCHRONIZED
)
85 res
|= PB_USAGE_UNSYNCHRONIZED
;
/* pb_buffer::map callback: maps the BO for CPU access, honoring
 * DONTBLOCK and flushing the CS when it still references the buffer.
 * NOTE(review): this block was badly garbled in extraction -- the
 * return-type line, local declarations (e.g. `write`, `domain`), the
 * return statements and most closing braces are missing.  The comments
 * below annotate only the surviving fragments; restore the dropped
 * lines from upstream before building. */
radeon_drm_buffer_map_internal(struct pb_buffer *_buf,
                               unsigned flags, void *flush_ctx)
/* Recover the wrapper and the CS passed via the opaque flush context. */
struct radeon_drm_buffer *buf = radeon_drm_buffer(_buf);
struct radeon_libdrm_cs *cs = flush_ctx;
/* Non-blocking map of a vertex buffer still referenced by the current
 * CS cannot succeed without stalling -- presumably bails out here
 * (the return statement was dropped). */
if (flags & PB_USAGE_DONTBLOCK) {
    if (_buf->base.usage & RADEON_PB_USAGE_VERTEX)
        if (cs && radeon_bo_is_referenced_by_cs(buf->bo, cs->cs))
/* Already mapped: presumably returns the cached CPU pointer (dropped). */
if (buf->bo->ptr != NULL)
/* Non-blocking path: a busy BO cannot be mapped without waiting.
 * `domain` is an out-parameter whose declaration was dropped. */
if (flags & PB_USAGE_DONTBLOCK) {
    if (radeon_bo_is_busy(buf->bo, &domain))
/* Blocking path: flush the CS that still references this BO so the
 * kernel can retire it before the map. */
if (cs && radeon_bo_is_referenced_by_cs(buf->bo, cs->cs)) {
    cs->flush_cs(cs->flush_data);
/* `write` (declaration dropped) presumably selects a writable mapping. */
if (flags & PB_USAGE_CPU_WRITE) {
/* Map failure -- presumably returns NULL (line dropped). */
if (radeon_bo_map(buf->bo, write)) {
/* Track the mapped buffer so the manager can unmap in bulk later
 * (see radeon_drm_bufmgr_flush_maps). */
insert_at_tail(&buf->mgr->buffer_map_list, buf);
/* pb_buffer::unmap callback.
 * NOTE(review): the return-type line and the whole function body were
 * dropped during extraction; restore from upstream. */
radeon_drm_buffer_unmap_internal(struct pb_buffer *_buf)
/* pb_buffer::get_base_buffer callback: presumably reports the
 * underlying buffer (and offset) for suballocated buffers.
 * NOTE(review): the return-type line, the final (offset) parameter and
 * the entire body were dropped during extraction; restore from
 * upstream. */
radeon_drm_buffer_get_base_buffer(struct pb_buffer *buf,
                                  struct pb_buffer **base_buf,
/* pb_buffer::validate callback.
 * NOTE(review): trailing parameter(s) and the entire body were dropped
 * during extraction; restore from upstream. */
static enum pipe_error
radeon_drm_buffer_validate(struct pb_buffer *_buf,
                           struct pb_validate *vl,
/* pb_buffer::fence callback.
 * NOTE(review): the return-type line and the entire body were dropped
 * during extraction; restore from upstream. */
radeon_drm_buffer_fence(struct pb_buffer *buf,
                        struct pipe_fence_handle *fence)
159 const struct pb_vtbl radeon_drm_buffer_vtbl
= {
160 radeon_drm_buffer_destroy
,
161 radeon_drm_buffer_map_internal
,
162 radeon_drm_buffer_unmap_internal
,
163 radeon_drm_buffer_validate
,
164 radeon_drm_buffer_fence
,
165 radeon_drm_buffer_get_base_buffer
,
168 struct pb_buffer
*radeon_drm_bufmgr_create_buffer_from_handle(struct pb_manager
*_mgr
,
171 struct radeon_drm_bufmgr
*mgr
= radeon_drm_bufmgr(_mgr
);
172 struct radeon_libdrm_winsys
*rws
= mgr
->rws
;
173 struct radeon_drm_buffer
*buf
;
174 struct radeon_bo
*bo
;
176 bo
= radeon_bo_open(rws
->bom
, handle
, 0,
181 buf
= CALLOC_STRUCT(radeon_drm_buffer
);
187 make_empty_list(buf
);
189 pipe_reference_init(&buf
->base
.base
.reference
, 1);
190 buf
->base
.base
.alignment
= 0;
191 buf
->base
.base
.usage
= PB_USAGE_GPU_WRITE
| PB_USAGE_GPU_READ
;
192 buf
->base
.base
.size
= bo
->size
;
193 buf
->base
.vtbl
= &radeon_drm_buffer_vtbl
;
201 static struct pb_buffer
*
202 radeon_drm_bufmgr_create_buffer(struct pb_manager
*_mgr
,
204 const struct pb_desc
*desc
)
206 struct radeon_drm_bufmgr
*mgr
= radeon_drm_bufmgr(_mgr
);
207 struct radeon_libdrm_winsys
*rws
= mgr
->rws
;
208 struct radeon_drm_buffer
*buf
;
211 buf
= CALLOC_STRUCT(radeon_drm_buffer
);
215 pipe_reference_init(&buf
->base
.base
.reference
, 1);
216 buf
->base
.base
.alignment
= desc
->alignment
;
217 buf
->base
.base
.usage
= desc
->usage
;
218 buf
->base
.base
.size
= size
;
219 buf
->base
.vtbl
= &radeon_drm_buffer_vtbl
;
222 make_empty_list(buf
);
225 (desc
->usage
& RADEON_PB_USAGE_DOMAIN_GTT
? RADEON_GEM_DOMAIN_GTT
: 0) |
226 (desc
->usage
& RADEON_PB_USAGE_DOMAIN_VRAM
? RADEON_GEM_DOMAIN_VRAM
: 0);
228 buf
->bo
= radeon_bo_open(rws
->bom
, 0, size
,
229 desc
->alignment
, domain
, 0);
/* pb_manager::flush callback.
 * NOTE(review): the return-type line and the body were dropped during
 * extraction; presumably a no-op for this manager -- confirm against
 * upstream. */
radeon_drm_bufmgr_flush(struct pb_manager *mgr)
/* pb_manager::destroy callback: free the manager itself.  Buffers hold
 * their own BO references, so nothing else to release here.
 * NOTE(review): the return-type line, braces and FREE were lost in
 * extraction and have been restored -- verify against upstream. */
static void
radeon_drm_bufmgr_destroy(struct pb_manager *_mgr)
{
    struct radeon_drm_bufmgr *mgr = radeon_drm_bufmgr(_mgr);
    FREE(mgr);
}
255 radeon_drm_bufmgr_create(struct radeon_libdrm_winsys
*rws
)
257 struct radeon_drm_bufmgr
*mgr
;
259 mgr
= CALLOC_STRUCT(radeon_drm_bufmgr
);
263 mgr
->base
.destroy
= radeon_drm_bufmgr_destroy
;
264 mgr
->base
.create_buffer
= radeon_drm_bufmgr_create_buffer
;
265 mgr
->base
.flush
= radeon_drm_bufmgr_flush
;
268 make_empty_list(&mgr
->buffer_map_list
);
272 static struct radeon_drm_buffer
*get_drm_buffer(struct pb_buffer
*_buf
)
274 struct radeon_drm_buffer
*buf
= NULL
;
276 if (_buf
->vtbl
== &radeon_drm_buffer_vtbl
) {
277 buf
= radeon_drm_buffer(_buf
);
279 struct pb_buffer
*base_buf
;
281 pb_get_base_buffer(_buf
, &base_buf
, &offset
);
283 if (base_buf
->vtbl
== &radeon_drm_buffer_vtbl
)
284 buf
= radeon_drm_buffer(base_buf
);
290 void *radeon_drm_buffer_map(struct r300_winsys_screen
*ws
,
291 struct r300_winsys_buffer
*buf
,
292 struct r300_winsys_cs
*cs
,
293 enum pipe_transfer_usage usage
)
295 struct pb_buffer
*_buf
= radeon_pb_buffer(buf
);
297 return pb_map(_buf
, get_pb_usage_from_transfer_flags(usage
), radeon_libdrm_cs(cs
));
/* Public winsys entry point: unmap a buffer previously mapped with
 * radeon_drm_buffer_map.
 * NOTE(review): the pb_unmap call and braces were lost in extraction
 * and have been restored -- verify against upstream. */
void radeon_drm_buffer_unmap(struct r300_winsys_screen *ws,
                             struct r300_winsys_buffer *buf)
{
    struct pb_buffer *_buf = radeon_pb_buffer(buf);

    pb_unmap(_buf);
}
308 boolean
radeon_drm_bufmgr_get_handle(struct pb_buffer
*_buf
,
309 struct winsys_handle
*whandle
)
311 struct drm_gem_flink flink
;
312 struct radeon_drm_buffer
*buf
= get_drm_buffer(_buf
);
314 if (whandle
->type
== DRM_API_HANDLE_TYPE_SHARED
) {
316 flink
.handle
= buf
->bo
->handle
;
318 if (ioctl(buf
->mgr
->rws
->fd
, DRM_IOCTL_GEM_FLINK
, &flink
)) {
323 buf
->flink
= flink
.name
;
325 whandle
->handle
= buf
->flink
;
326 } else if (whandle
->type
== DRM_API_HANDLE_TYPE_KMS
) {
327 whandle
->handle
= buf
->bo
->handle
;
332 void radeon_drm_bufmgr_get_tiling(struct r300_winsys_screen
*ws
,
333 struct r300_winsys_buffer
*_buf
,
334 enum r300_buffer_tiling
*microtiled
,
335 enum r300_buffer_tiling
*macrotiled
)
337 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
338 uint32_t flags
= 0, pitch
;
340 radeon_bo_get_tiling(buf
->bo
, &flags
, &pitch
);
342 *microtiled
= R300_BUFFER_LINEAR
;
343 *macrotiled
= R300_BUFFER_LINEAR
;
344 if (flags
& RADEON_BO_FLAGS_MICRO_TILE
)
345 *microtiled
= R300_BUFFER_TILED
;
347 if (flags
& RADEON_BO_FLAGS_MACRO_TILE
)
348 *macrotiled
= R300_BUFFER_TILED
;
351 void radeon_drm_bufmgr_set_tiling(struct r300_winsys_screen
*ws
,
352 struct r300_winsys_buffer
*_buf
,
353 enum r300_buffer_tiling microtiled
,
354 enum r300_buffer_tiling macrotiled
,
357 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
359 if (microtiled
== R300_BUFFER_TILED
)
360 flags
|= RADEON_BO_FLAGS_MICRO_TILE
;
361 /* XXX Remove this ifdef when libdrm version 2.4.19 becomes mandatory. */
362 #ifdef RADEON_BO_FLAGS_MICRO_TILE_SQUARE
363 else if (microtiled
== R300_BUFFER_SQUARETILED
)
364 flags
|= RADEON_BO_FLAGS_MICRO_TILE_SQUARE
;
366 if (macrotiled
== R300_BUFFER_TILED
)
367 flags
|= RADEON_BO_FLAGS_MACRO_TILE
;
369 radeon_bo_set_tiling(buf
->bo
, flags
, pitch
);
372 static uint32_t get_gem_domain(enum r300_buffer_domain domain
)
376 if (domain
& R300_DOMAIN_GTT
)
377 res
|= RADEON_GEM_DOMAIN_GTT
;
378 if (domain
& R300_DOMAIN_VRAM
)
379 res
|= RADEON_GEM_DOMAIN_VRAM
;
383 void radeon_drm_bufmgr_add_buffer(struct r300_winsys_cs
*rcs
,
384 struct r300_winsys_buffer
*_buf
,
385 enum r300_buffer_domain rd
,
386 enum r300_buffer_domain wd
)
388 struct radeon_libdrm_cs
*cs
= radeon_libdrm_cs(rcs
);
389 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
390 uint32_t gem_rd
= get_gem_domain(rd
);
391 uint32_t gem_wd
= get_gem_domain(wd
);
393 radeon_cs_space_add_persistent_bo(cs
->cs
, buf
->bo
, gem_rd
, gem_wd
);
396 void radeon_drm_bufmgr_write_reloc(struct r300_winsys_cs
*rcs
,
397 struct r300_winsys_buffer
*_buf
,
398 enum r300_buffer_domain rd
,
399 enum r300_buffer_domain wd
)
401 struct radeon_libdrm_cs
*cs
= radeon_libdrm_cs(rcs
);
402 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
404 uint32_t gem_rd
= get_gem_domain(rd
);
405 uint32_t gem_wd
= get_gem_domain(wd
);
407 cs
->cs
->cdw
= cs
->base
.cdw
;
408 retval
= radeon_cs_write_reloc(cs
->cs
, buf
->bo
, gem_rd
, gem_wd
, 0);
409 cs
->base
.cdw
= cs
->cs
->cdw
;
411 fprintf(stderr
, "radeon: Relocation of %p (%d, %d, %d) failed!\n",
412 buf
, gem_rd
, gem_wd
, 0);
416 boolean
radeon_drm_bufmgr_is_buffer_referenced(struct r300_winsys_cs
*rcs
,
417 struct r300_winsys_buffer
*_buf
,
418 enum r300_reference_domain domain
)
420 struct radeon_libdrm_cs
*cs
= radeon_libdrm_cs(rcs
);
421 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
424 if (domain
& R300_REF_CS
) {
425 if (radeon_bo_is_referenced_by_cs(buf
->bo
, cs
->cs
)) {
430 if (domain
& R300_REF_HW
) {
431 if (radeon_bo_is_busy(buf
->bo
, &tmp
)) {
439 void radeon_drm_bufmgr_flush_maps(struct pb_manager
*_mgr
)
441 struct radeon_drm_bufmgr
*mgr
= radeon_drm_bufmgr(_mgr
);
442 struct radeon_drm_buffer
*rpb
, *t_rpb
;
444 foreach_s(rpb
, t_rpb
, &mgr
->buffer_map_list
) {
445 radeon_bo_unmap(rpb
->bo
);
447 remove_from_list(rpb
);
450 make_empty_list(&mgr
->buffer_map_list
);
453 void radeon_drm_bufmgr_wait(struct r300_winsys_screen
*ws
,
454 struct r300_winsys_buffer
*_buf
)
456 struct radeon_drm_buffer
*buf
= get_drm_buffer(radeon_pb_buffer(_buf
));
458 radeon_bo_wait(buf
->bo
);