/* nvc0: add preliminary support for images
 * [mesa.git] src/gallium/drivers/nouveau/nvc0/nvc0_state_validate.c
 */
2 #include "util/u_format.h"
3 #include "util/u_math.h"
4
5 #include "nvc0/nvc0_context.h"
6
#if 0
/* Experimental setup of the zcull (hierarchical depth culling) RAM region.
 * Compiled out: the region layout and the unknown methods used below
 * (SUBC_3D 0x07e0 / 0x15c8) were never fully verified.
 */
static void
nvc0_validate_zcull(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
   struct nv50_surface *sf = nv50_surface(fb->zsbuf);
   struct nv50_miptree *mt = nv50_miptree(sf->base.texture);
   struct nouveau_bo *bo = mt->base.bo;
   uint32_t size;
   /* zcull data is placed right after the miptree, 128 KiB aligned */
   uint32_t offset = align(mt->total_size, 1 << 17);
   unsigned width, height;

   assert(mt->base.base.depth0 == 1 && mt->base.base.array_size < 2);

   size = mt->total_size * 2;

   /* height aligned to 32; width rounded up to a multiple of 224 */
   height = align(fb->height, 32);
   width = fb->width % 224;
   if (width)
      width = fb->width + (224 - width);
   else
      width = fb->width;

   BEGIN_NVC0(push, NVC0_3D(ZCULL_REGION), 1);
   PUSH_DATA (push, 0);
   BEGIN_NVC0(push, NVC0_3D(ZCULL_ADDRESS_HIGH), 2);
   PUSH_DATAh(push, bo->offset + offset);
   PUSH_DATA (push, bo->offset + offset);
   offset += 1 << 17;
   BEGIN_NVC0(push, NVC0_3D(ZCULL_LIMIT_HIGH), 2);
   PUSH_DATAh(push, bo->offset + offset);
   PUSH_DATA (push, bo->offset + offset);
   BEGIN_NVC0(push, SUBC_3D(0x07e0), 2);
   PUSH_DATA (push, size);
   PUSH_DATA (push, size >> 16);
   BEGIN_NVC0(push, SUBC_3D(0x15c8), 1); /* bits 0x3 */
   PUSH_DATA (push, 2);
   BEGIN_NVC0(push, NVC0_3D(ZCULL_WIDTH), 4);
   PUSH_DATA (push, width);
   PUSH_DATA (push, height);
   PUSH_DATA (push, 1);
   PUSH_DATA (push, 0);
   BEGIN_NVC0(push, NVC0_3D(ZCULL_WINDOW_OFFSET_X), 2);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 0);
   BEGIN_NVC0(push, NVC0_3D(ZCULL_INVALIDATE), 1);
   PUSH_DATA (push, 0);
}
#endif
57
/* Program render target slot @i as a "null" RT: zero address, 64x0 pixels,
 * format 0. @layers is still programmed so layered-rendering setup (e.g.
 * framebuffer-no-attachments) stays consistent.
 */
static inline void
nvc0_fb_set_null_rt(struct nouveau_pushbuf *push, unsigned i, unsigned layers)
{
   BEGIN_NVC0(push, NVC0_3D(RT_ADDRESS_HIGH(i)), 9);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 64); // width
   PUSH_DATA (push, 0); // height
   PUSH_DATA (push, 0); // format
   PUSH_DATA (push, 0); // tile mode
   PUSH_DATA (push, layers); // layers
   PUSH_DATA (push, 0); // layer stride
   PUSH_DATA (push, 0); // base layer
}
72
/* Validate framebuffer state: program all color render targets and the
 * depth/stencil surface, track GPU read/write status for serialization,
 * and upload per-sample positions to the fragment stage's (index 4) AUX
 * constant buffer.
 */
static void
nvc0_validate_fb(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
   struct nvc0_screen *screen = nvc0->screen;
   unsigned i, ms;
   unsigned ms_mode = NVC0_3D_MULTISAMPLE_MODE_MS1;
   unsigned nr_cbufs = fb->nr_cbufs;
   bool serialize = false;

   nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);

   BEGIN_NVC0(push, NVC0_3D(SCREEN_SCISSOR_HORIZ), 2);
   PUSH_DATA (push, fb->width << 16);
   PUSH_DATA (push, fb->height << 16);

   for (i = 0; i < fb->nr_cbufs; ++i) {
      struct nv50_surface *sf;
      struct nv04_resource *res;
      struct nouveau_bo *bo;

      /* Unbound attachment: program a null RT so slot numbering holds. */
      if (!fb->cbufs[i]) {
         nvc0_fb_set_null_rt(push, i, 0);
         continue;
      }

      sf = nv50_surface(fb->cbufs[i]);
      res = nv04_resource(sf->base.texture);
      bo = res->bo;

      BEGIN_NVC0(push, NVC0_3D(RT_ADDRESS_HIGH(i)), 9);
      PUSH_DATAh(push, res->address + sf->offset);
      PUSH_DATA (push, res->address + sf->offset);
      if (likely(nouveau_bo_memtype(bo))) {
         /* Tiled (miptree) surface. */
         struct nv50_miptree *mt = nv50_miptree(sf->base.texture);

         assert(sf->base.texture->target != PIPE_BUFFER);

         PUSH_DATA(push, sf->width);
         PUSH_DATA(push, sf->height);
         PUSH_DATA(push, nvc0_format_table[sf->base.format].rt);
         PUSH_DATA(push, (mt->layout_3d << 16) |
                   mt->level[sf->base.u.tex.level].tile_mode);
         PUSH_DATA(push, sf->base.u.tex.first_layer + sf->depth);
         PUSH_DATA(push, mt->layer_stride >> 2);
         PUSH_DATA(push, sf->base.u.tex.first_layer);

         ms_mode = mt->ms_mode;
      } else {
         /* Linear surface or rendering to a buffer resource. */
         if (res->base.target == PIPE_BUFFER) {
            PUSH_DATA(push, 262144);
            PUSH_DATA(push, 1);
         } else {
            PUSH_DATA(push, nv50_miptree(sf->base.texture)->level[0].pitch);
            PUSH_DATA(push, sf->height);
         }
         PUSH_DATA(push, nvc0_format_table[sf->base.format].rt);
         /* NOTE(review): 1 << 12 presumably selects linear layout here —
          * confirm against the 3D class method docs. */
         PUSH_DATA(push, 1 << 12);
         PUSH_DATA(push, 1);
         PUSH_DATA(push, 0);
         PUSH_DATA(push, 0);

         nvc0_resource_fence(res, NOUVEAU_BO_WR);

         assert(!fb->zsbuf);
      }

      if (res->status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         serialize = true;
      res->status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      res->status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      /* only register for writing, otherwise we'd always serialize here */
      BCTX_REFN(nvc0->bufctx_3d, 3D_FB, res, WR);
   }

   if (fb->zsbuf) {
      struct nv50_miptree *mt = nv50_miptree(fb->zsbuf->texture);
      struct nv50_surface *sf = nv50_surface(fb->zsbuf);
      int unk = mt->base.base.target == PIPE_TEXTURE_2D;

      BEGIN_NVC0(push, NVC0_3D(ZETA_ADDRESS_HIGH), 5);
      PUSH_DATAh(push, mt->base.address + sf->offset);
      PUSH_DATA (push, mt->base.address + sf->offset);
      PUSH_DATA (push, nvc0_format_table[fb->zsbuf->format].rt);
      PUSH_DATA (push, mt->level[sf->base.u.tex.level].tile_mode);
      PUSH_DATA (push, mt->layer_stride >> 2);
      BEGIN_NVC0(push, NVC0_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 1);
      BEGIN_NVC0(push, NVC0_3D(ZETA_HORIZ), 3);
      PUSH_DATA (push, sf->width);
      PUSH_DATA (push, sf->height);
      PUSH_DATA (push, (unk << 16) |
                 (sf->base.u.tex.first_layer + sf->depth));
      BEGIN_NVC0(push, NVC0_3D(ZETA_BASE_LAYER), 1);
      PUSH_DATA (push, sf->base.u.tex.first_layer);

      ms_mode = mt->ms_mode;

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         serialize = true;
      mt->base.status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      BCTX_REFN(nvc0->bufctx_3d, 3D_FB, &mt->base, WR);
   } else {
      BEGIN_NVC0(push, NVC0_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 0);
   }

   /* No attachments at all: bind one null RT and derive the multisample
    * mode from fb->samples (ffs maps a power-of-two count to its mode). */
   if (nr_cbufs == 0 && !fb->zsbuf) {
      assert(util_is_power_of_two(fb->samples));
      assert(fb->samples <= 8);

      nvc0_fb_set_null_rt(push, 0, fb->layers);

      if (fb->samples > 1)
         ms_mode = ffs(fb->samples) - 1;
      nr_cbufs = 1;
   }

   BEGIN_NVC0(push, NVC0_3D(RT_CONTROL), 1);
   /* 076543210 is octal: identity mapping of RT indices; low bits = count */
   PUSH_DATA (push, (076543210 << 4) | nr_cbufs);
   IMMED_NVC0(push, NVC0_3D(MULTISAMPLE_MODE), ms_mode);

   /* Upload the sample positions for the selected mode into the fragment
    * stage AUX constant buffer. */
   ms = 1 << ms_mode;
   BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
   PUSH_DATA (push, 1024);
   PUSH_DATAh(push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(4));
   PUSH_DATA (push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(4));
   BEGIN_1IC0(push, NVC0_3D(CB_POS), 1 + 2 * ms);
   PUSH_DATA (push, NVC0_CB_AUX_SAMPLE_INFO);
   for (i = 0; i < ms; i++) {
      float xy[2];
      nvc0->base.pipe.get_sample_position(&nvc0->base.pipe, ms, i, xy);
      PUSH_DATAf(push, xy[0]);
      PUSH_DATAf(push, xy[1]);
   }

   if (serialize)
      IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);

   NOUVEAU_DRV_STAT(&nvc0->screen->base, gpu_serialize_count, serialize);
}
218
219 static void
220 nvc0_validate_blend_colour(struct nvc0_context *nvc0)
221 {
222 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
223
224 BEGIN_NVC0(push, NVC0_3D(BLEND_COLOR(0)), 4);
225 PUSH_DATAf(push, nvc0->blend_colour.color[0]);
226 PUSH_DATAf(push, nvc0->blend_colour.color[1]);
227 PUSH_DATAf(push, nvc0->blend_colour.color[2]);
228 PUSH_DATAf(push, nvc0->blend_colour.color[3]);
229 }
230
231 static void
232 nvc0_validate_stencil_ref(struct nvc0_context *nvc0)
233 {
234 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
235 const ubyte *ref = &nvc0->stencil_ref.ref_value[0];
236
237 IMMED_NVC0(push, NVC0_3D(STENCIL_FRONT_FUNC_REF), ref[0]);
238 IMMED_NVC0(push, NVC0_3D(STENCIL_BACK_FUNC_REF), ref[1]);
239 }
240
241 static void
242 nvc0_validate_stipple(struct nvc0_context *nvc0)
243 {
244 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
245 unsigned i;
246
247 BEGIN_NVC0(push, NVC0_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
248 for (i = 0; i < 32; ++i)
249 PUSH_DATA(push, util_bswap32(nvc0->stipple.stipple[i]));
250 }
251
/* Emit scissor rectangles for all dirty viewport slots. Runs when the
 * scissor rects change (NVC0_NEW_3D_SCISSOR) or when the rasterizer's
 * scissor-enable flag toggles; a toggle re-emits every slot.
 */
static void
nvc0_validate_scissor(struct nvc0_context *nvc0)
{
   int i;
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;

   if (!(nvc0->dirty_3d & NVC0_NEW_3D_SCISSOR) &&
       nvc0->rast->pipe.scissor == nvc0->state.scissor)
      return;

   /* The enable flag flipped: every slot must be reprogrammed. */
   if (nvc0->state.scissor != nvc0->rast->pipe.scissor)
      nvc0->scissors_dirty = (1 << NVC0_MAX_VIEWPORTS) - 1;

   nvc0->state.scissor = nvc0->rast->pipe.scissor;

   for (i = 0; i < NVC0_MAX_VIEWPORTS; i++) {
      struct pipe_scissor_state *s = &nvc0->scissors[i];
      if (!(nvc0->scissors_dirty & (1 << i)))
         continue;

      BEGIN_NVC0(push, NVC0_3D(SCISSOR_HORIZ(i)), 2);
      if (nvc0->rast->pipe.scissor) {
         PUSH_DATA(push, (s->maxx << 16) | s->minx);
         PUSH_DATA(push, (s->maxy << 16) | s->miny);
      } else {
         /* Scissor test disabled: open the rect to the maximum range. */
         PUSH_DATA(push, (0xffff << 16) | 0);
         PUSH_DATA(push, (0xffff << 16) | 0);
      }
   }
   nvc0->scissors_dirty = 0;
}
283
/* Emit viewport translate/scale, clip rectangle and depth range for every
 * dirty viewport slot.
 */
static void
nvc0_validate_viewport(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   int x, y, w, h, i;
   float zmin, zmax;

   for (i = 0; i < NVC0_MAX_VIEWPORTS; i++) {
      struct pipe_viewport_state *vp = &nvc0->viewports[i];

      if (!(nvc0->viewports_dirty & (1 << i)))
         continue;

      BEGIN_NVC0(push, NVC0_3D(VIEWPORT_TRANSLATE_X(i)), 3);
      PUSH_DATAf(push, vp->translate[0]);
      PUSH_DATAf(push, vp->translate[1]);
      PUSH_DATAf(push, vp->translate[2]);

      BEGIN_NVC0(push, NVC0_3D(VIEWPORT_SCALE_X(i)), 3);
      PUSH_DATAf(push, vp->scale[0]);
      PUSH_DATAf(push, vp->scale[1]);
      PUSH_DATAf(push, vp->scale[2]);

      /* now set the viewport rectangle to viewport dimensions for clipping */

      /* Reconstruct the integer rect from translate/scale; fabsf() covers
       * negative scales (flipped viewports); x/y are clamped at 0. */
      x = util_iround(MAX2(0.0f, vp->translate[0] - fabsf(vp->scale[0])));
      y = util_iround(MAX2(0.0f, vp->translate[1] - fabsf(vp->scale[1])));
      w = util_iround(vp->translate[0] + fabsf(vp->scale[0])) - x;
      h = util_iround(vp->translate[1] + fabsf(vp->scale[1])) - y;

      BEGIN_NVC0(push, NVC0_3D(VIEWPORT_HORIZ(i)), 2);
      PUSH_DATA (push, (w << 16) | x);
      PUSH_DATA (push, (h << 16) | y);

      zmin = vp->translate[2] - fabsf(vp->scale[2]);
      zmax = vp->translate[2] + fabsf(vp->scale[2]);

      BEGIN_NVC0(push, NVC0_3D(DEPTH_RANGE_NEAR(i)), 2);
      PUSH_DATAf(push, zmin);
      PUSH_DATAf(push, zmax);
   }
   nvc0->viewports_dirty = 0;
}
327
/* Upload all user clip plane equations (PIPE_MAX_CLIP_PLANES x vec4) into
 * the AUX constant buffer of shader stage @s.
 */
static inline void
nvc0_upload_uclip_planes(struct nvc0_context *nvc0, unsigned s)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   struct nvc0_screen *screen = nvc0->screen;

   BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
   PUSH_DATA (push, 1024);
   PUSH_DATAh(push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(s));
   PUSH_DATA (push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(s));
   BEGIN_1IC0(push, NVC0_3D(CB_POS), PIPE_MAX_CLIP_PLANES * 4 + 1);
   PUSH_DATA (push, NVC0_CB_AUX_UCP_INFO);
   PUSH_DATAp(push, &nvc0->clip.ucp[0][0], PIPE_MAX_CLIP_PLANES * 4);
}
342
/* Ensure the last-vertex-stage program @vp supports at least as many user
 * clip planes as @mask enables. If not, destroy it and re-validate the
 * matching stage, which rebuilds it with the new num_ucps.
 */
static inline void
nvc0_check_program_ucps(struct nvc0_context *nvc0,
                        struct nvc0_program *vp, uint8_t mask)
{
   /* highest enabled plane index + 1 */
   const unsigned n = util_logbase2(mask) + 1;

   if (vp->vp.num_ucps >= n)
      return;
   nvc0_program_destroy(nvc0, vp);

   vp->vp.num_ucps = n;
   if (likely(vp == nvc0->vertprog))
      nvc0_vertprog_validate(nvc0);
   else
   if (likely(vp == nvc0->gmtyprog))
      nvc0_gmtyprog_validate(nvc0);
   else
      nvc0_tevlprog_validate(nvc0);
}
362
/* Validate clip state: pick the last enabled vertex-processing stage
 * (geometry > tess eval > vertex), make sure its program supports enough
 * user clip planes, upload plane equations if needed, and update the
 * hardware clip-distance enable mask and mode.
 */
static void
nvc0_validate_clip(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   struct nvc0_program *vp;
   unsigned stage;
   uint8_t clip_enable = nvc0->rast->pipe.clip_plane_enable;

   if (nvc0->gmtyprog) {
      stage = 3;
      vp = nvc0->gmtyprog;
   } else
   if (nvc0->tevlprog) {
      stage = 2;
      vp = nvc0->tevlprog;
   } else {
      stage = 0;
      vp = nvc0->vertprog;
   }

   if (clip_enable && vp->vp.num_ucps < PIPE_MAX_CLIP_PLANES)
      nvc0_check_program_ucps(nvc0, vp, clip_enable);

   /* Re-upload when clip state or the owning stage's program got dirty. */
   if (nvc0->dirty_3d & (NVC0_NEW_3D_CLIP | (NVC0_NEW_3D_VERTPROG << stage)))
      if (vp->vp.num_ucps > 0 && vp->vp.num_ucps <= PIPE_MAX_CLIP_PLANES)
         nvc0_upload_uclip_planes(nvc0, stage);

   clip_enable &= vp->vp.clip_enable;

   if (nvc0->state.clip_enable != clip_enable) {
      nvc0->state.clip_enable = clip_enable;
      IMMED_NVC0(push, NVC0_3D(CLIP_DISTANCE_ENABLE), clip_enable);
   }
   if (nvc0->state.clip_mode != vp->vp.clip_mode) {
      nvc0->state.clip_mode = vp->vp.clip_mode;
      BEGIN_NVC0(push, NVC0_3D(CLIP_DISTANCE_MODE), 1);
      PUSH_DATA (push, vp->vp.clip_mode);
   }
}
402
403 static void
404 nvc0_validate_blend(struct nvc0_context *nvc0)
405 {
406 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
407
408 PUSH_SPACE(push, nvc0->blend->size);
409 PUSH_DATAp(push, nvc0->blend->state, nvc0->blend->size);
410 }
411
412 static void
413 nvc0_validate_zsa(struct nvc0_context *nvc0)
414 {
415 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
416
417 PUSH_SPACE(push, nvc0->zsa->size);
418 PUSH_DATAp(push, nvc0->zsa->state, nvc0->zsa->size);
419 }
420
421 static void
422 nvc0_validate_rasterizer(struct nvc0_context *nvc0)
423 {
424 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
425
426 PUSH_SPACE(push, nvc0->rast->size);
427 PUSH_DATAp(push, nvc0->rast->state, nvc0->rast->size);
428 }
429
/* Validate constant buffers for all graphics stages (0..4). Slot 0 "user"
 * constbufs (OpenGL default uniform block) are streamed into the screen's
 * uniform_bo; other slots are UBOs bound directly by GPU address.
 */
static void
nvc0_constbufs_validate(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   unsigned s;

   for (s = 0; s < 5; ++s) {
      /* process each dirty slot, lowest first */
      while (nvc0->constbuf_dirty[s]) {
         int i = ffs(nvc0->constbuf_dirty[s]) - 1;
         nvc0->constbuf_dirty[s] &= ~(1 << i);

         if (nvc0->constbuf[s][i].user) {
            struct nouveau_bo *bo = nvc0->screen->uniform_bo;
            const unsigned base = NVC0_CB_USR_INFO(s);
            const unsigned size = nvc0->constbuf[s][0].size;
            assert(i == 0); /* we really only want OpenGL uniforms here */
            assert(nvc0->constbuf[s][0].u.data);

            /* Grow the bound CB window (256-byte aligned) if data no
             * longer fits. */
            if (nvc0->state.uniform_buffer_bound[s] < size) {
               nvc0->state.uniform_buffer_bound[s] = align(size, 0x100);

               BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
               PUSH_DATA (push, nvc0->state.uniform_buffer_bound[s]);
               PUSH_DATAh(push, bo->offset + base);
               PUSH_DATA (push, bo->offset + base);
               BEGIN_NVC0(push, NVC0_3D(CB_BIND(s)), 1);
               PUSH_DATA (push, (0 << 4) | 1);
            }
            /* stream the user data into the buffer, in 32-bit words */
            nvc0_cb_bo_push(&nvc0->base, bo, NV_VRAM_DOMAIN(&nvc0->screen->base),
                            base, nvc0->state.uniform_buffer_bound[s],
                            0, (size + 3) / 4,
                            nvc0->constbuf[s][0].u.data);
         } else {
            struct nv04_resource *res =
               nv04_resource(nvc0->constbuf[s][i].u.buf);
            if (res) {
               BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
               PUSH_DATA (push, nvc0->constbuf[s][i].size);
               PUSH_DATAh(push, res->address + nvc0->constbuf[s][i].offset);
               PUSH_DATA (push, res->address + nvc0->constbuf[s][i].offset);
               BEGIN_NVC0(push, NVC0_3D(CB_BIND(s)), 1);
               PUSH_DATA (push, (i << 4) | 1);

               BCTX_REFN(nvc0->bufctx_3d, 3D_CB(s, i), res, RD);

               nvc0->cb_dirty = 1; /* Force cache flush for UBO. */
               res->cb_bindings[s] |= 1 << i;
            } else {
               /* slot unbound: disable the binding */
               BEGIN_NVC0(push, NVC0_3D(CB_BIND(s)), 1);
               PUSH_DATA (push, (i << 4) | 0);
            }
            if (i == 0)
               nvc0->state.uniform_buffer_bound[s] = 0;
         }
      }
   }

   /* Invalidate all COMPUTE constbufs because they are aliased with 3D. */
   nvc0->dirty_cp |= NVC0_NEW_CP_CONSTBUF;
   nvc0->constbuf_dirty[5] |= nvc0->constbuf_valid[5];
   nvc0->state.uniform_buffer_bound[5] = 0;
}
492
/* Upload shader buffer (SSBO-style) address/size records for every slot of
 * every graphics stage into the per-stage AUX constant buffer, and
 * reference the bound buffers for read/write.
 */
static void
nvc0_validate_buffers(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   struct nvc0_screen *screen = nvc0->screen;
   int i, s;

   for (s = 0; s < 5; s++) {
      BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
      PUSH_DATA (push, 1024);
      PUSH_DATAh(push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(s));
      PUSH_DATA (push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(s));
      BEGIN_1IC0(push, NVC0_3D(CB_POS), 1 + 4 * NVC0_MAX_BUFFERS);
      PUSH_DATA (push, NVC0_CB_AUX_BUF_INFO(0));
      for (i = 0; i < NVC0_MAX_BUFFERS; i++) {
         if (nvc0->buffers[s][i].buffer) {
            struct nv04_resource *res =
               nv04_resource(nvc0->buffers[s][i].buffer);
            /* record layout: address lo, address hi, size, pad */
            PUSH_DATA (push, res->address + nvc0->buffers[s][i].buffer_offset);
            PUSH_DATAh(push, res->address + nvc0->buffers[s][i].buffer_offset);
            PUSH_DATA (push, nvc0->buffers[s][i].buffer_size);
            PUSH_DATA (push, 0);
            BCTX_REFN(nvc0->bufctx_3d, 3D_BUF, res, RDWR);
         } else {
            /* unbound slot: all zeros */
            PUSH_DATA (push, 0);
            PUSH_DATA (push, 0);
            PUSH_DATA (push, 0);
            PUSH_DATA (push, 0);
         }
      }
   }

}
526
527 static void
528 nvc0_validate_sample_mask(struct nvc0_context *nvc0)
529 {
530 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
531
532 unsigned mask[4] =
533 {
534 nvc0->sample_mask & 0xffff,
535 nvc0->sample_mask & 0xffff,
536 nvc0->sample_mask & 0xffff,
537 nvc0->sample_mask & 0xffff
538 };
539
540 BEGIN_NVC0(push, NVC0_3D(MSAA_MASK(0)), 4);
541 PUSH_DATA (push, mask[0]);
542 PUSH_DATA (push, mask[1]);
543 PUSH_DATA (push, mask[2]);
544 PUSH_DATA (push, mask[3]);
545 }
546
547 static void
548 nvc0_validate_min_samples(struct nvc0_context *nvc0)
549 {
550 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
551 int samples;
552
553 samples = util_next_power_of_two(nvc0->min_samples);
554 if (samples > 1)
555 samples |= NVC0_3D_SAMPLE_SHADING_ENABLE;
556
557 IMMED_NVC0(push, NVC0_3D(SAMPLE_SHADING), samples);
558 }
559
560 static void
561 nvc0_validate_driverconst(struct nvc0_context *nvc0)
562 {
563 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
564 struct nvc0_screen *screen = nvc0->screen;
565 int i;
566
567 for (i = 0; i < 5; ++i) {
568 BEGIN_NVC0(push, NVC0_3D(CB_SIZE), 3);
569 PUSH_DATA (push, 1024);
570 PUSH_DATAh(push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(i));
571 PUSH_DATA (push, screen->uniform_bo->offset + NVC0_CB_AUX_INFO(i));
572 BEGIN_NVC0(push, NVC0_3D(CB_BIND(i)), 1);
573 PUSH_DATA (push, (15 << 4) | 1);
574 }
575
576 nvc0->dirty_cp |= NVC0_NEW_CP_DRIVERCONST;
577 }
578
579 static void
580 nvc0_validate_derived_1(struct nvc0_context *nvc0)
581 {
582 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
583 bool rasterizer_discard;
584
585 if (nvc0->rast && nvc0->rast->pipe.rasterizer_discard) {
586 rasterizer_discard = true;
587 } else {
588 bool zs = nvc0->zsa &&
589 (nvc0->zsa->pipe.depth.enabled || nvc0->zsa->pipe.stencil[0].enabled);
590 rasterizer_discard = !zs &&
591 (!nvc0->fragprog || !nvc0->fragprog->hdr[18]);
592 }
593
594 if (rasterizer_discard != nvc0->state.rasterizer_discard) {
595 nvc0->state.rasterizer_discard = rasterizer_discard;
596 IMMED_NVC0(push, NVC0_3D(RASTERIZE_ENABLE), !rasterizer_discard);
597 }
598 }
599
600 /* alpha test is disabled if there are no color RTs, so make sure we have at
601 * least one if alpha test is enabled. Note that this must run after
602 * nvc0_validate_fb, otherwise that will override the RT count setting.
603 */
604 static void
605 nvc0_validate_derived_2(struct nvc0_context *nvc0)
606 {
607 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
608
609 if (nvc0->zsa && nvc0->zsa->pipe.alpha.enabled &&
610 nvc0->framebuffer.zsbuf &&
611 nvc0->framebuffer.nr_cbufs == 0) {
612 nvc0_fb_set_null_rt(push, 0, 0);
613 BEGIN_NVC0(push, NVC0_3D(RT_CONTROL), 1);
614 PUSH_DATA (push, (076543210 << 4) | 1);
615 }
616 }
617
618 static void
619 nvc0_validate_derived_3(struct nvc0_context *nvc0)
620 {
621 struct nouveau_pushbuf *push = nvc0->base.pushbuf;
622 struct pipe_framebuffer_state *fb = &nvc0->framebuffer;
623 uint32_t ms = 0;
624
625 if ((!fb->nr_cbufs || !fb->cbufs[0] ||
626 !util_format_is_pure_integer(fb->cbufs[0]->format)) && nvc0->blend) {
627 if (nvc0->blend->pipe.alpha_to_coverage)
628 ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_COVERAGE;
629 if (nvc0->blend->pipe.alpha_to_one)
630 ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_ONE;
631 }
632
633 BEGIN_NVC0(push, NVC0_3D(MULTISAMPLE_CTRL), 1);
634 PUSH_DATA (push, ms);
635 }
636
/* Upload the default tessellation levels (4 outer + 2 inner) —
 * presumably used when no TCS supplies them; confirm against callers.
 */
static void
nvc0_validate_tess_state(struct nvc0_context *nvc0)
{
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;

   BEGIN_NVC0(push, NVC0_3D(TESS_LEVEL_OUTER(0)), 6);
   PUSH_DATAp(push, nvc0->default_tess_outer, 4);
   PUSH_DATAp(push, nvc0->default_tess_inner, 2);
}
646
/* Make @ctx_to the screen's current context: inherit the hardware-state
 * shadow from the previous context (or the screen's saved state) and mark
 * everything dirty so it is re-validated on the next draw/launch.
 */
static void
nvc0_switch_pipe_context(struct nvc0_context *ctx_to)
{
   struct nvc0_context *ctx_from = ctx_to->screen->cur_ctx;
   unsigned s;

   if (ctx_from)
      ctx_to->state = ctx_from->state;
   else
      ctx_to->state = ctx_to->screen->save_state;

   ctx_to->dirty_3d = ~0;
   ctx_to->dirty_cp = ~0;
   ctx_to->viewports_dirty = ~0;
   ctx_to->scissors_dirty = ~0;

   for (s = 0; s < 6; ++s) {
      ctx_to->samplers_dirty[s] = ~0;
      ctx_to->textures_dirty[s] = ~0;
      ctx_to->constbuf_dirty[s] = (1 << NVC0_MAX_PIPE_CONSTBUFS) - 1;
      ctx_to->buffers_dirty[s] = ~0;
      ctx_to->images_dirty[s] = ~0;
   }

   /* Reset tfb as the shader that owns it may have been deleted. */
   ctx_to->state.tfb = NULL;

   /* Drop dirty bits whose state object isn't bound, so validation
    * doesn't dereference NULL. */
   if (!ctx_to->vertex)
      ctx_to->dirty_3d &= ~(NVC0_NEW_3D_VERTEX | NVC0_NEW_3D_ARRAYS);
   if (!ctx_to->idxbuf.buffer)
      ctx_to->dirty_3d &= ~NVC0_NEW_3D_IDXBUF;

   if (!ctx_to->vertprog)
      ctx_to->dirty_3d &= ~NVC0_NEW_3D_VERTPROG;
   if (!ctx_to->fragprog)
      ctx_to->dirty_3d &= ~NVC0_NEW_3D_FRAGPROG;

   if (!ctx_to->blend)
      ctx_to->dirty_3d &= ~NVC0_NEW_3D_BLEND;
   if (!ctx_to->rast)
      ctx_to->dirty_3d &= ~(NVC0_NEW_3D_RASTERIZER | NVC0_NEW_3D_SCISSOR);
   if (!ctx_to->zsa)
      ctx_to->dirty_3d &= ~NVC0_NEW_3D_ZSA;

   ctx_to->screen->cur_ctx = ctx_to;
}
693
/* Table of 3D validation functions and the dirty bits that trigger them.
 * Entries run in array order; ordering matters for some of them — e.g.
 * nvc0_validate_derived_2 must follow nvc0_validate_fb (see its comment).
 */
static struct nvc0_state_validate
validate_list_3d[] = {
    { nvc0_validate_fb,            NVC0_NEW_3D_FRAMEBUFFER },
    { nvc0_validate_blend,         NVC0_NEW_3D_BLEND },
    { nvc0_validate_zsa,           NVC0_NEW_3D_ZSA },
    { nvc0_validate_sample_mask,   NVC0_NEW_3D_SAMPLE_MASK },
    { nvc0_validate_rasterizer,    NVC0_NEW_3D_RASTERIZER },
    { nvc0_validate_blend_colour,  NVC0_NEW_3D_BLEND_COLOUR },
    { nvc0_validate_stencil_ref,   NVC0_NEW_3D_STENCIL_REF },
    { nvc0_validate_stipple,       NVC0_NEW_3D_STIPPLE },
    { nvc0_validate_scissor,       NVC0_NEW_3D_SCISSOR | NVC0_NEW_3D_RASTERIZER },
    { nvc0_validate_viewport,      NVC0_NEW_3D_VIEWPORT },
    { nvc0_vertprog_validate,      NVC0_NEW_3D_VERTPROG },
    { nvc0_tctlprog_validate,      NVC0_NEW_3D_TCTLPROG },
    { nvc0_tevlprog_validate,      NVC0_NEW_3D_TEVLPROG },
    { nvc0_validate_tess_state,    NVC0_NEW_3D_TESSFACTOR },
    { nvc0_gmtyprog_validate,      NVC0_NEW_3D_GMTYPROG },
    { nvc0_fragprog_validate,      NVC0_NEW_3D_FRAGPROG | NVC0_NEW_3D_RASTERIZER },
    { nvc0_validate_derived_1,     NVC0_NEW_3D_FRAGPROG | NVC0_NEW_3D_ZSA |
                                   NVC0_NEW_3D_RASTERIZER },
    { nvc0_validate_derived_2,     NVC0_NEW_3D_ZSA | NVC0_NEW_3D_FRAMEBUFFER },
    { nvc0_validate_derived_3,     NVC0_NEW_3D_BLEND | NVC0_NEW_3D_FRAMEBUFFER },
    { nvc0_validate_clip,          NVC0_NEW_3D_CLIP | NVC0_NEW_3D_RASTERIZER |
                                   NVC0_NEW_3D_VERTPROG |
                                   NVC0_NEW_3D_TEVLPROG |
                                   NVC0_NEW_3D_GMTYPROG },
    { nvc0_constbufs_validate,     NVC0_NEW_3D_CONSTBUF },
    { nvc0_validate_textures,      NVC0_NEW_3D_TEXTURES },
    { nvc0_validate_samplers,      NVC0_NEW_3D_SAMPLERS },
    { nve4_set_tex_handles,        NVC0_NEW_3D_TEXTURES | NVC0_NEW_3D_SAMPLERS },
    { nvc0_vertex_arrays_validate, NVC0_NEW_3D_VERTEX | NVC0_NEW_3D_ARRAYS },
    { nvc0_validate_surfaces,      NVC0_NEW_3D_SURFACES },
    { nvc0_validate_buffers,       NVC0_NEW_3D_BUFFERS },
    { nvc0_idxbuf_validate,        NVC0_NEW_3D_IDXBUF },
    { nvc0_tfb_validate,           NVC0_NEW_3D_TFB_TARGETS | NVC0_NEW_3D_GMTYPROG },
    { nvc0_validate_min_samples,   NVC0_NEW_3D_MIN_SAMPLES },
    { nvc0_validate_driverconst,   NVC0_NEW_3D_DRIVERCONST },
};
732
733 bool
734 nvc0_state_validate(struct nvc0_context *nvc0, uint32_t mask,
735 struct nvc0_state_validate *validate_list, int size,
736 uint32_t *dirty, struct nouveau_bufctx *bufctx)
737 {
738 uint32_t state_mask;
739 int ret;
740 unsigned i;
741
742 if (nvc0->screen->cur_ctx != nvc0)
743 nvc0_switch_pipe_context(nvc0);
744
745 state_mask = *dirty & mask;
746
747 if (state_mask) {
748 for (i = 0; i < size; ++i) {
749 struct nvc0_state_validate *validate = &validate_list[i];
750
751 if (state_mask & validate->states)
752 validate->func(nvc0);
753 }
754 *dirty &= ~state_mask;
755
756 nvc0_bufctx_fence(nvc0, bufctx, false);
757 }
758
759 nouveau_pushbuf_bufctx(nvc0->base.pushbuf, bufctx);
760 ret = nouveau_pushbuf_validate(nvc0->base.pushbuf);
761
762 return !ret;
763 }
764
765 bool
766 nvc0_state_validate_3d(struct nvc0_context *nvc0, uint32_t mask)
767 {
768 bool ret;
769
770 ret = nvc0_state_validate(nvc0, mask, validate_list_3d,
771 ARRAY_SIZE(validate_list_3d), &nvc0->dirty_3d,
772 nvc0->bufctx_3d);
773
774 if (unlikely(nvc0->state.flushed)) {
775 nvc0->state.flushed = false;
776 nvc0_bufctx_fence(nvc0, nvc0->bufctx_3d, true);
777 }
778 return ret;
779 }