r600g: add bo fenced list.
src/gallium/winsys/r600/drm/radeon_bo_pb.c
/*
 * Copyright 2010 Dave Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *      Dave Airlie
 */
#include <util/u_inlines.h>
#include <util/u_memory.h>
#include <util/u_double_list.h>
#include <pipebuffer/pb_buffer.h>
#include <pipebuffer/pb_bufmgr.h>
#include "r600_priv.h"

struct radeon_bo_pb {
        struct pb_buffer b;
        struct radeon_bo *bo;

        struct radeon_bo_pbmgr *mgr;
        /* Node on the manager's buffer_map_list while an unmap is pending. */
        struct list_head maplist;
};

extern const struct pb_vtbl radeon_bo_pb_vtbl;

static INLINE struct radeon_bo_pb *radeon_bo_pb(struct pb_buffer *buf)
{
        assert(buf);
        assert(buf->vtbl == &radeon_bo_pb_vtbl);
        return (struct radeon_bo_pb *)buf;
}

struct radeon_bo_pbmgr {
        struct pb_manager b;
        struct radeon *radeon;
        /* Buffers whose CPU unmap has been deferred; drained by
         * radeon_bo_pbmgr_flush_maps().
         */
        struct list_head buffer_map_list;
};

static INLINE struct radeon_bo_pbmgr *radeon_bo_pbmgr(struct pb_manager *mgr)
{
        assert(mgr);
        return (struct radeon_bo_pbmgr *)mgr;
}

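/* Destroy hook: drop the buffer from the deferred-unmap list, unmap the
 * underlying kernel BO if it is still CPU-mapped, and release our reference
 * to it.
 */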
static void radeon_bo_pb_destroy(struct pb_buffer *_buf)
{
        struct radeon_bo_pb *buf = radeon_bo_pb(_buf);

        LIST_DEL(&buf->maplist);

        if (buf->bo->data != NULL) {
                radeon_bo_unmap(buf->mgr->radeon, buf->bo);
        }
        radeon_bo_reference(buf->mgr->radeon, &buf->bo, NULL);
        FREE(buf);
}

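/* Map hook.  Returns a CPU pointer to the BO contents, honouring the
 * PB_USAGE_UNSYNCHRONIZED and PB_USAGE_DONTBLOCK flags; "ctx" is the
 * pipe_context used to flush queued commands before blocking on the BO.
 */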
static void *
radeon_bo_pb_map_internal(struct pb_buffer *_buf,
                          unsigned flags, void *ctx)
{
        struct radeon_bo_pb *buf = radeon_bo_pb(_buf);
        struct pipe_context *pctx = ctx;

        if (flags & PB_USAGE_UNSYNCHRONIZED) {
                if (!buf->bo->data && radeon_bo_map(buf->mgr->radeon, buf->bo)) {
                        return NULL;
                }
                LIST_DELINIT(&buf->maplist);
                return buf->bo->data;
        }

        /* Someone else still holds a reference (typically an unflushed
         * command stream): either bail out for DONTBLOCK or flush the
         * context so the pending commands reach the kernel before we wait.
         */
        if (p_atomic_read(&buf->bo->reference.count) > 1) {
                if (flags & PB_USAGE_DONTBLOCK) {
                        return NULL;
                }
                if (ctx) {
                        pctx->flush(pctx, 0, NULL);
                }
        }

        if (flags & PB_USAGE_DONTBLOCK) {
                uint32_t domain;
                if (radeon_bo_busy(buf->mgr->radeon, buf->bo, &domain))
                        return NULL;
        }

        if (buf->bo->data != NULL) {
                if (radeon_bo_wait(buf->mgr->radeon, buf->bo)) {
                        return NULL;
                }
        } else {
                if (radeon_bo_map(buf->mgr->radeon, buf->bo)) {
                        return NULL;
                }
                if (radeon_bo_wait(buf->mgr->radeon, buf->bo)) {
                        radeon_bo_unmap(buf->mgr->radeon, buf->bo);
                        return NULL;
                }
        }

        LIST_DELINIT(&buf->maplist);
        return buf->bo->data;
}

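/* Unmap hook.  The actual radeon_bo_unmap() is deferred: the buffer is
 * queued on the manager's buffer_map_list and unmapped later in
 * radeon_bo_pbmgr_flush_maps().
 */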
static void radeon_bo_pb_unmap_internal(struct pb_buffer *_buf)
{
        struct radeon_bo_pb *buf = radeon_bo_pb(_buf);
        LIST_ADDTAIL(&buf->maplist, &buf->mgr->buffer_map_list);
}

static void
radeon_bo_pb_get_base_buffer(struct pb_buffer *buf,
                             struct pb_buffer **base_buf,
                             unsigned *offset)
{
        *base_buf = buf;
        *offset = 0;
}

static enum pipe_error
radeon_bo_pb_validate(struct pb_buffer *_buf,
                      struct pb_validate *vl,
                      unsigned flags)
{
        /* Always pinned */
        return PIPE_OK;
}

static void
radeon_bo_pb_fence(struct pb_buffer *buf,
                   struct pipe_fence_handle *fence)
{
}

const struct pb_vtbl radeon_bo_pb_vtbl = {
        radeon_bo_pb_destroy,
        radeon_bo_pb_map_internal,
        radeon_bo_pb_unmap_internal,
        radeon_bo_pb_validate,
        radeon_bo_pb_fence,
        radeon_bo_pb_get_base_buffer,
};

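/* Wrap an existing kernel BO handle (e.g. a buffer shared by another
 * process) in a pb_buffer.
 */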
struct pb_buffer *
radeon_bo_pb_create_buffer_from_handle(struct pb_manager *_mgr,
                                       uint32_t handle)
{
        struct radeon_bo_pbmgr *mgr = radeon_bo_pbmgr(_mgr);
        struct radeon *radeon = mgr->radeon;
        struct radeon_bo_pb *bo;
        struct radeon_bo *hw_bo;

        hw_bo = radeon_bo(radeon, handle, 0, 0, NULL);
        if (hw_bo == NULL)
                return NULL;

        bo = CALLOC_STRUCT(radeon_bo_pb);
        if (!bo) {
                radeon_bo_reference(radeon, &hw_bo, NULL);
                return NULL;
        }

        LIST_INITHEAD(&bo->maplist);
        pipe_reference_init(&bo->b.base.reference, 1);
        bo->b.base.alignment = 0;
        bo->b.base.usage = PB_USAGE_GPU_WRITE | PB_USAGE_GPU_READ;
        bo->b.base.size = hw_bo->size;
        bo->b.vtbl = &radeon_bo_pb_vtbl;
        bo->mgr = mgr;

        bo->bo = hw_bo;

        return &bo->b;
}

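/* pb_manager::create_buffer hook: allocate a fresh kernel BO of the
 * requested size and alignment and wrap it in a pb_buffer.
 */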
static struct pb_buffer *
radeon_bo_pb_create_buffer(struct pb_manager *_mgr,
                           pb_size size,
                           const struct pb_desc *desc)
{
        struct radeon_bo_pbmgr *mgr = radeon_bo_pbmgr(_mgr);
        struct radeon *radeon = mgr->radeon;
        struct radeon_bo_pb *bo;

        bo = CALLOC_STRUCT(radeon_bo_pb);
        if (!bo)
                goto error1;

        pipe_reference_init(&bo->b.base.reference, 1);
        bo->b.base.alignment = desc->alignment;
        bo->b.base.usage = desc->usage;
        bo->b.base.size = size;
        bo->b.vtbl = &radeon_bo_pb_vtbl;
        bo->mgr = mgr;

        LIST_INITHEAD(&bo->maplist);

        bo->bo = radeon_bo(radeon, 0, size,
                           desc->alignment, NULL);
        if (bo->bo == NULL)
                goto error2;
        return &bo->b;

error2:
        FREE(bo);
error1:
        return NULL;
}

static void
radeon_bo_pbmgr_flush(struct pb_manager *mgr)
{
        /* NOP */
}

static void
radeon_bo_pbmgr_destroy(struct pb_manager *_mgr)
{
        struct radeon_bo_pbmgr *mgr = radeon_bo_pbmgr(_mgr);
        FREE(mgr);
}

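/* Create a pb_manager whose buffers are backed directly by kernel BOs. */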
struct pb_manager *radeon_bo_pbmgr_create(struct radeon *radeon)
{
        struct radeon_bo_pbmgr *mgr;

        mgr = CALLOC_STRUCT(radeon_bo_pbmgr);
        if (!mgr)
                return NULL;

        mgr->b.destroy = radeon_bo_pbmgr_destroy;
        mgr->b.create_buffer = radeon_bo_pb_create_buffer;
        mgr->b.flush = radeon_bo_pbmgr_flush;

        mgr->radeon = radeon;
        LIST_INITHEAD(&mgr->buffer_map_list);
        return &mgr->b;
}

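/* Unmap every buffer whose unmap was deferred by
 * radeon_bo_pb_unmap_internal() and reset the list.
 */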
void radeon_bo_pbmgr_flush_maps(struct pb_manager *_mgr)
{
        struct radeon_bo_pbmgr *mgr = radeon_bo_pbmgr(_mgr);
        struct radeon_bo_pb *rpb = NULL;
        struct radeon_bo_pb *t_rpb;

        LIST_FOR_EACH_ENTRY_SAFE(rpb, t_rpb, &mgr->buffer_map_list, maplist) {
                radeon_bo_unmap(mgr->radeon, rpb->bo);
                LIST_DELINIT(&rpb->maplist);
        }

        LIST_INITHEAD(&mgr->buffer_map_list);
}

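/* Resolve a pb_buffer back to the underlying radeon_bo, looking through one
 * level of sub-allocation via pb_get_base_buffer() if needed.
 */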
struct radeon_bo *radeon_bo_pb_get_bo(struct pb_buffer *_buf)
{
        struct radeon_bo_pb *buf;
        if (_buf->vtbl == &radeon_bo_pb_vtbl) {
                buf = radeon_bo_pb(_buf);
                return buf->bo;
        } else {
                struct pb_buffer *base_buf;
                pb_size offset;
                pb_get_base_buffer(_buf, &base_buf, &offset);
                if (base_buf->vtbl == &radeon_bo_pb_vtbl) {
                        buf = radeon_bo_pb(base_buf);
                        return buf->bo;
                }
        }
        return NULL;
}
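
/*
 * Illustrative usage sketch (not part of the original winsys code; "rw"
 * stands for the struct radeon instance created elsewhere during winsys
 * init, and the size/flag values are arbitrary):
 *
 *      struct pb_manager *mgr = radeon_bo_pbmgr_create(rw);
 *      struct pb_desc desc;
 *
 *      desc.alignment = 4096;
 *      desc.usage = PB_USAGE_GPU_READ | PB_USAGE_GPU_WRITE;
 *
 *      struct pb_buffer *pbuf = mgr->create_buffer(mgr, 64 * 1024, &desc);
 *      struct radeon_bo *bo = radeon_bo_pb_get_bo(pbuf);
 *      ...
 *      pb_reference(&pbuf, NULL);
 *      mgr->destroy(mgr);
 */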