Merge remote-tracking branch 'mesa-public/master' into vulkan
[mesa.git] / src / gallium / drivers / nouveau / nvc0 / nvc0_context.c
1 /*
2 * Copyright 2010 Christoph Bumiller
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 */
22
23 #include "pipe/p_defines.h"
24 #include "util/u_framebuffer.h"
25
26 #include "nvc0/nvc0_context.h"
27 #include "nvc0/nvc0_screen.h"
28 #include "nvc0/nvc0_resource.h"
29
30 static void
31 nvc0_flush(struct pipe_context *pipe,
32 struct pipe_fence_handle **fence,
33 unsigned flags)
34 {
35 struct nvc0_context *nvc0 = nvc0_context(pipe);
36 struct nouveau_screen *screen = &nvc0->screen->base;
37
38 if (fence)
39 nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);
40
41 PUSH_KICK(nvc0->base.pushbuf); /* fencing handled in kick_notify */
42
43 nouveau_context_update_frame_stats(&nvc0->base);
44 }
45
46 static void
47 nvc0_texture_barrier(struct pipe_context *pipe)
48 {
49 struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
50
51 IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
52 IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
53 }
54
55 static void
56 nvc0_memory_barrier(struct pipe_context *pipe, unsigned flags)
57 {
58 struct nvc0_context *nvc0 = nvc0_context(pipe);
59 int i, s;
60
61 if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
62 for (i = 0; i < nvc0->num_vtxbufs; ++i) {
63 if (!nvc0->vtxbuf[i].buffer)
64 continue;
65 if (nvc0->vtxbuf[i].buffer->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
66 nvc0->base.vbo_dirty = true;
67 }
68
69 if (nvc0->idxbuf.buffer &&
70 nvc0->idxbuf.buffer->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
71 nvc0->base.vbo_dirty = true;
72
73 for (s = 0; s < 5 && !nvc0->cb_dirty; ++s) {
74 uint32_t valid = nvc0->constbuf_valid[s];
75
76 while (valid && !nvc0->cb_dirty) {
77 const unsigned i = ffs(valid) - 1;
78 struct pipe_resource *res;
79
80 valid &= ~(1 << i);
81 if (nvc0->constbuf[s][i].user)
82 continue;
83
84 res = nvc0->constbuf[s][i].u.buf;
85 if (!res)
86 continue;
87
88 if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
89 nvc0->cb_dirty = true;
90 }
91 }
92 }
93 }
94
95 static void
96 nvc0_context_unreference_resources(struct nvc0_context *nvc0)
97 {
98 unsigned s, i;
99
100 nouveau_bufctx_del(&nvc0->bufctx_3d);
101 nouveau_bufctx_del(&nvc0->bufctx);
102 nouveau_bufctx_del(&nvc0->bufctx_cp);
103
104 util_unreference_framebuffer_state(&nvc0->framebuffer);
105
106 for (i = 0; i < nvc0->num_vtxbufs; ++i)
107 pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);
108
109 pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);
110
111 for (s = 0; s < 6; ++s) {
112 for (i = 0; i < nvc0->num_textures[s]; ++i)
113 pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);
114
115 for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
116 if (!nvc0->constbuf[s][i].user)
117 pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);
118 }
119
120 for (s = 0; s < 2; ++s) {
121 for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
122 pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
123 }
124
125 for (i = 0; i < nvc0->num_tfbbufs; ++i)
126 pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);
127
128 for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
129 ++i) {
130 struct pipe_resource **res = util_dynarray_element(
131 &nvc0->global_residents, struct pipe_resource *, i);
132 pipe_resource_reference(res, NULL);
133 }
134 util_dynarray_fini(&nvc0->global_residents);
135
136 if (nvc0->tcp_empty)
137 nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
138 }
139
140 static void
141 nvc0_destroy(struct pipe_context *pipe)
142 {
143 struct nvc0_context *nvc0 = nvc0_context(pipe);
144
145 if (nvc0->screen->cur_ctx == nvc0) {
146 nvc0->screen->cur_ctx = NULL;
147 nvc0->screen->save_state = nvc0->state;
148 nvc0->screen->save_state.tfb = NULL;
149 }
150
151 /* Unset bufctx, we don't want to revalidate any resources after the flush.
152 * Other contexts will always set their bufctx again on action calls.
153 */
154 nouveau_pushbuf_bufctx(nvc0->base.pushbuf, NULL);
155 nouveau_pushbuf_kick(nvc0->base.pushbuf, nvc0->base.pushbuf->channel);
156
157 nvc0_context_unreference_resources(nvc0);
158 nvc0_blitctx_destroy(nvc0);
159
160 nouveau_context_destroy(&nvc0->base);
161 }
162
163 void
164 nvc0_default_kick_notify(struct nouveau_pushbuf *push)
165 {
166 struct nvc0_screen *screen = push->user_priv;
167
168 if (screen) {
169 nouveau_fence_next(&screen->base);
170 nouveau_fence_update(&screen->base, true);
171 if (screen->cur_ctx)
172 screen->cur_ctx->state.flushed = true;
173 NOUVEAU_DRV_STAT(&screen->base, pushbuf_count, 1);
174 }
175 }
176
/* Called when a resource's backing storage is about to be replaced: walk all
 * bindings that may reference @res, mark the matching state dirty and reset
 * the corresponding bufctx slots so they get revalidated.
 *
 * @ref is the number of known references to the resource; it is decremented
 * for each binding found, and the scan stops early once it reaches zero.
 * Returns the number of references still unaccounted for.
 */
static int
nvc0_invalidate_resource_storage(struct nouveau_context *ctx,
                                 struct pipe_resource *res,
                                 int ref)
{
   struct nvc0_context *nvc0 = nvc0_context(&ctx->pipe);
   /* Resources with no bind flags set are treated as vertex buffers. */
   unsigned bind = res->bind ? res->bind : PIPE_BIND_VERTEX_BUFFER;
   unsigned s, i;

   /* Color attachments. */
   if (bind & PIPE_BIND_RENDER_TARGET) {
      for (i = 0; i < nvc0->framebuffer.nr_cbufs; ++i) {
         if (nvc0->framebuffer.cbufs[i] &&
             nvc0->framebuffer.cbufs[i]->texture == res) {
            nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_FB);
            if (!--ref)
               return ref;
         }
      }
   }
   /* Depth/stencil attachment. */
   if (bind & PIPE_BIND_DEPTH_STENCIL) {
      if (nvc0->framebuffer.zsbuf &&
          nvc0->framebuffer.zsbuf->texture == res) {
         nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_FB);
         if (!--ref)
            return ref;
      }
   }

   /* Buffer-style bindings: vertex, index, constant, stream-output,
    * indirect-args and sampler-view usage.
    */
   if (bind & (PIPE_BIND_VERTEX_BUFFER |
               PIPE_BIND_INDEX_BUFFER |
               PIPE_BIND_CONSTANT_BUFFER |
               PIPE_BIND_STREAM_OUTPUT |
               PIPE_BIND_COMMAND_ARGS_BUFFER |
               PIPE_BIND_SAMPLER_VIEW)) {
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (nvc0->vtxbuf[i].buffer == res) {
            nvc0->dirty |= NVC0_NEW_ARRAYS;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_VTX);
            if (!--ref)
               return ref;
         }
      }

      if (nvc0->idxbuf.buffer == res) {
         nvc0->dirty |= NVC0_NEW_IDXBUF;
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_IDX);
         if (!--ref)
            return ref;
      }

      /* Sampler views of the 5 graphics shader stages. */
      for (s = 0; s < 5; ++s) {
         for (i = 0; i < nvc0->num_textures[s]; ++i) {
            if (nvc0->textures[s][i] &&
                nvc0->textures[s][i]->texture == res) {
               nvc0->textures_dirty[s] |= 1 << i;
               nvc0->dirty |= NVC0_NEW_TEXTURES;
               nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_TEX(s, i));
               if (!--ref)
                  return ref;
            }
         }
      }

      /* Non-user constant buffers of the 5 graphics shader stages. */
      for (s = 0; s < 5; ++s) {
         for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i) {
            if (!(nvc0->constbuf_valid[s] & (1 << i)))
               continue;
            if (!nvc0->constbuf[s][i].user &&
                nvc0->constbuf[s][i].u.buf == res) {
               nvc0->dirty |= NVC0_NEW_CONSTBUF;
               nvc0->constbuf_dirty[s] |= 1 << i;
               nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_CB(s, i));
               if (!--ref)
                  return ref;
            }
         }
      }
   }

   return ref;
}
260
261 static void
262 nvc0_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
263 float *);
264
/* Create an nvc0 pipe_context for @pscreen. Allocates the context and its
 * buffer contexts, installs the driver entry points, uploads the builtin
 * shader library, and pins the screen's permanently resident buffers.
 * Returns the new context or NULL on failure.
 */
struct pipe_context *
nvc0_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nvc0_screen *screen = nvc0_screen(pscreen);
   struct nvc0_context *nvc0;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nvc0 = CALLOC_STRUCT(nvc0_context);
   if (!nvc0)
      return NULL;
   pipe = &nvc0->base.pipe;

   if (!nvc0_blitctx_create(nvc0))
      goto out_err;

   /* The pushbuf and client are shared with the screen. */
   nvc0->base.pushbuf = screen->base.pushbuf;
   nvc0->base.client = screen->base.client;

   /* Three bufctxs: misc (fence), 3D state and compute state. */
   ret = nouveau_bufctx_new(screen->base.client, 2, &nvc0->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_3D_COUNT,
                               &nvc0->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_CP_COUNT,
                               &nvc0->bufctx_cp);
   if (ret)
      goto out_err;

   nvc0->screen = screen;
   nvc0->base.screen = &screen->base;

   pipe->screen = pscreen;
   pipe->priv = priv;

   pipe->destroy = nvc0_destroy;

   pipe->draw_vbo = nvc0_draw_vbo;
   pipe->clear = nvc0_clear;
   /* Kepler (NVE4+) uses a different compute dispatch path. */
   pipe->launch_grid = (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) ?
      nve4_launch_grid : nvc0_launch_grid;

   pipe->flush = nvc0_flush;
   pipe->texture_barrier = nvc0_texture_barrier;
   pipe->memory_barrier = nvc0_memory_barrier;
   pipe->get_sample_position = nvc0_context_get_sample_position;

   nouveau_context_init(&nvc0->base);
   nvc0_init_query_functions(nvc0);
   nvc0_init_surface_functions(nvc0);
   nvc0_init_state_functions(nvc0);
   nvc0_init_transfer_functions(nvc0);
   nvc0_init_resource_functions(pipe);

   nvc0->base.invalidate_resource_storage = nvc0_invalidate_resource_storage;

   pipe->create_video_codec = nvc0_create_decoder;
   pipe->create_video_buffer = nvc0_video_buffer_create;

   /* shader builtin library is per-screen, but we need a context for m2mf */
   nvc0_program_library_upload(nvc0);
   nvc0_program_init_tcp_empty(nvc0);
   if (!nvc0->tcp_empty)
      goto out_err;
   /* set the empty tctl prog on next draw in case one is never set */
   nvc0->dirty |= NVC0_NEW_TCTLPROG;

   /* now that there are no more opportunities for errors, set the current
    * context if there isn't already one.
    */
   if (!screen->cur_ctx) {
      nvc0->state = screen->save_state;
      screen->cur_ctx = nvc0;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nvc0->bufctx);
   }
   screen->base.pushbuf->kick_notify = nvc0_default_kick_notify;

   /* add permanently resident buffers to bufctxts */

   /* Read-only VRAM: shader code, uniforms, texture headers/samplers. */
   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nvc0->bufctx_3d, SCREEN, flags, screen->text);
   BCTX_REFN_bo(nvc0->bufctx_3d, SCREEN, flags, screen->uniform_bo);
   BCTX_REFN_bo(nvc0->bufctx_3d, SCREEN, flags, screen->txc);
   if (screen->compute) {
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->text);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->txc);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->parm);
   }

   /* Read-write VRAM: polygon cache and compute local storage. */
   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RDWR;

   if (screen->poly_cache)
      BCTX_REFN_bo(nvc0->bufctx_3d, SCREEN, flags, screen->poly_cache);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->tls);

   /* The fence BO lives in GART and is written by the GPU. */
   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nvc0->bufctx_3d, SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nvc0->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nvc0->base.scratch.bo_size = 2 << 20;

   /* ~0 marks every texture handle slot as unused. */
   memset(nvc0->tex_handles, ~0, sizeof(nvc0->tex_handles));

   util_dynarray_init(&nvc0->global_residents);

   return pipe;

out_err:
   /* Partial-construction cleanup; CALLOC zeroed everything, so only
    * successfully created pieces are non-NULL here.
    */
   if (nvc0) {
      if (nvc0->bufctx_3d)
         nouveau_bufctx_del(&nvc0->bufctx_3d);
      if (nvc0->bufctx_cp)
         nouveau_bufctx_del(&nvc0->bufctx_cp);
      if (nvc0->bufctx)
         nouveau_bufctx_del(&nvc0->bufctx);
      FREE(nvc0->blit);
      FREE(nvc0);
   }
   return NULL;
}
391
392 void
393 nvc0_bufctx_fence(struct nvc0_context *nvc0, struct nouveau_bufctx *bufctx,
394 bool on_flush)
395 {
396 struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
397 struct nouveau_list *it;
398 NOUVEAU_DRV_STAT_IFD(unsigned count = 0);
399
400 for (it = list->next; it != list; it = it->next) {
401 struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
402 struct nv04_resource *res = ref->priv;
403 if (res)
404 nvc0_resource_validate(res, (unsigned)ref->priv_data);
405 NOUVEAU_DRV_STAT_IFD(count++);
406 }
407 NOUVEAU_DRV_STAT(&nvc0->screen->base, resource_validate_count, count);
408 }
409
/* pipe_context::get_sample_position — return the sub-pixel location of
 * @sample_index for a @sample_count-sample surface, in [0,1) pixel space.
 * Table entries are in 1/16th-of-a-pixel units (hence the 0.0625 scale).
 */
static void
nvc0_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb },   /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 },   /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 },   /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   const uint8_t (*pos)[2];

   switch (sample_count) {
   case 0: /* fallthrough: unspecified count behaves like single-sampled */
   case 1:
      pos = ms1;
      break;
   case 2:
      pos = ms2;
      break;
   case 4:
      pos = ms4;
      break;
   case 8:
      pos = ms8;
      break;
   default:
      assert(0);
      return; /* bad sample count -> undefined locations */
   }

   xy[0] = pos[sample_index][0] * 0.0625f;
   xy[1] = pos[sample_index][1] * 0.0625f;
}