#include "util/u_format.h"

#include "nv50/nv50_context.h"
#include "nv50/nv50_defs.xml.h"

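/* Program render target slot i as a null RT: zero address and format, and a
 * 64x0 rectangle, which leaves the slot effectively unused.
 */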
static inline void
nv50_fb_set_null_rt(struct nouveau_pushbuf *push, unsigned i)
{
   BEGIN_NV04(push, NV50_3D(RT_ADDRESS_HIGH(i)), 4);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 0);
   PUSH_DATA (push, 0);
   BEGIN_NV04(push, NV50_3D(RT_HORIZ(i)), 2);
   PUSH_DATA (push, 64);
   PUSH_DATA (push, 0);
}

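/* Emit the current framebuffer state: color render targets (null RTs for
 * unbound slots), the depth/stencil surface, the multisample mode derived
 * from the bound surfaces, and the clear viewport. On NVA3 and later the
 * per-sample positions are also uploaded to the AUX constant buffer.
 */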
static void
nv50_validate_fb(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct pipe_framebuffer_state *fb = &nv50->framebuffer;
   unsigned i;
   unsigned ms_mode = NV50_3D_MULTISAMPLE_MODE_MS1;
   uint32_t array_size = 0xffff, array_mode = 0;

   nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_FB);

   BEGIN_NV04(push, NV50_3D(RT_CONTROL), 1);
   PUSH_DATA (push, (076543210 << 4) | fb->nr_cbufs);
   BEGIN_NV04(push, NV50_3D(SCREEN_SCISSOR_HORIZ), 2);
   PUSH_DATA (push, fb->width << 16);
   PUSH_DATA (push, fb->height << 16);

   for (i = 0; i < fb->nr_cbufs; ++i) {
      struct nv50_miptree *mt;
      struct nv50_surface *sf;
      struct nouveau_bo *bo;

      if (!fb->cbufs[i]) {
         nv50_fb_set_null_rt(push, i);
         continue;
      }

      mt = nv50_miptree(fb->cbufs[i]->texture);
      sf = nv50_surface(fb->cbufs[i]);
      bo = mt->base.bo;

      array_size = MIN2(array_size, sf->depth);
      if (mt->layout_3d)
         array_mode = NV50_3D_RT_ARRAY_MODE_MODE_3D; /* 1 << 16 */

      /* can't mix 3D with ARRAY or have RTs of different depth/array_size */
      assert(mt->layout_3d || !array_mode || array_size == 1);

      BEGIN_NV04(push, NV50_3D(RT_ADDRESS_HIGH(i)), 5);
      PUSH_DATAh(push, mt->base.address + sf->offset);
      PUSH_DATA (push, mt->base.address + sf->offset);
      PUSH_DATA (push, nv50_format_table[sf->base.format].rt);
      if (likely(nouveau_bo_memtype(bo))) {
         PUSH_DATA (push, mt->level[sf->base.u.tex.level].tile_mode);
         PUSH_DATA (push, mt->layer_stride >> 2);
         BEGIN_NV04(push, NV50_3D(RT_HORIZ(i)), 2);
         PUSH_DATA (push, sf->width);
         PUSH_DATA (push, sf->height);
         BEGIN_NV04(push, NV50_3D(RT_ARRAY_MODE), 1);
         PUSH_DATA (push, array_mode | array_size);
         nv50->rt_array_mode = array_mode | array_size;
      } else {
         PUSH_DATA (push, 0);
         PUSH_DATA (push, 0);
         BEGIN_NV04(push, NV50_3D(RT_HORIZ(i)), 2);
         PUSH_DATA (push, NV50_3D_RT_HORIZ_LINEAR | mt->level[0].pitch);
         PUSH_DATA (push, sf->height);
         BEGIN_NV04(push, NV50_3D(RT_ARRAY_MODE), 1);
         PUSH_DATA (push, 0);

         assert(!fb->zsbuf);
         assert(!mt->ms_mode);
      }

      ms_mode = mt->ms_mode;

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         nv50->state.rt_serialize = true;
      mt->base.status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      /* only register for writing, otherwise we'd always serialize here */
      BCTX_REFN(nv50->bufctx_3d, FB, &mt->base, WR);
   }

   if (fb->zsbuf) {
      struct nv50_miptree *mt = nv50_miptree(fb->zsbuf->texture);
      struct nv50_surface *sf = nv50_surface(fb->zsbuf);
      int unk = mt->base.base.target == PIPE_TEXTURE_3D || sf->depth == 1;

      BEGIN_NV04(push, NV50_3D(ZETA_ADDRESS_HIGH), 5);
      PUSH_DATAh(push, mt->base.address + sf->offset);
      PUSH_DATA (push, mt->base.address + sf->offset);
      PUSH_DATA (push, nv50_format_table[fb->zsbuf->format].rt);
      PUSH_DATA (push, mt->level[sf->base.u.tex.level].tile_mode);
      PUSH_DATA (push, mt->layer_stride >> 2);
      BEGIN_NV04(push, NV50_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 1);
      BEGIN_NV04(push, NV50_3D(ZETA_HORIZ), 3);
      PUSH_DATA (push, sf->width);
      PUSH_DATA (push, sf->height);
      PUSH_DATA (push, (unk << 16) | sf->depth);

      ms_mode = mt->ms_mode;

      if (mt->base.status & NOUVEAU_BUFFER_STATUS_GPU_READING)
         nv50->state.rt_serialize = true;
      mt->base.status |= NOUVEAU_BUFFER_STATUS_GPU_WRITING;
      mt->base.status &= ~NOUVEAU_BUFFER_STATUS_GPU_READING;

      BCTX_REFN(nv50->bufctx_3d, FB, &mt->base, WR);
   } else {
      BEGIN_NV04(push, NV50_3D(ZETA_ENABLE), 1);
      PUSH_DATA (push, 0);
   }

   BEGIN_NV04(push, NV50_3D(MULTISAMPLE_MODE), 1);
   PUSH_DATA (push, ms_mode);

   /* Only need to initialize the first viewport, which is used for clears */
   BEGIN_NV04(push, NV50_3D(VIEWPORT_HORIZ(0)), 2);
   PUSH_DATA (push, fb->width << 16);
   PUSH_DATA (push, fb->height << 16);

   if (nv50->screen->tesla->oclass >= NVA3_3D_CLASS) {
      unsigned ms = 1 << ms_mode;
      BEGIN_NV04(push, NV50_3D(CB_ADDR), 1);
      PUSH_DATA (push, (NV50_CB_AUX_SAMPLE_OFFSET << (8 - 2)) | NV50_CB_AUX);
      BEGIN_NI04(push, NV50_3D(CB_DATA(0)), 2 * ms);
      for (i = 0; i < ms; i++) {
         float xy[2];
         nv50->base.pipe.get_sample_position(&nv50->base.pipe, ms, i, xy);
         PUSH_DATAf(push, xy[0]);
         PUSH_DATAf(push, xy[1]);
      }
   }
}

static void
nv50_validate_blend_colour(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   BEGIN_NV04(push, NV50_3D(BLEND_COLOR(0)), 4);
   PUSH_DATAf(push, nv50->blend_colour.color[0]);
   PUSH_DATAf(push, nv50->blend_colour.color[1]);
   PUSH_DATAf(push, nv50->blend_colour.color[2]);
   PUSH_DATAf(push, nv50->blend_colour.color[3]);
}

static void
nv50_validate_stencil_ref(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   BEGIN_NV04(push, NV50_3D(STENCIL_FRONT_FUNC_REF), 1);
   PUSH_DATA (push, nv50->stencil_ref.ref_value[0]);
   BEGIN_NV04(push, NV50_3D(STENCIL_BACK_FUNC_REF), 1);
   PUSH_DATA (push, nv50->stencil_ref.ref_value[1]);
}

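/* Upload the 32x32 polygon stipple pattern. Each 32-bit row is byte-swapped
 * before being pushed, presumably to match the ordering the hardware expects.
 */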
static void
nv50_validate_stipple(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   unsigned i;

   BEGIN_NV04(push, NV50_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
   for (i = 0; i < 32; ++i)
      PUSH_DATA(push, util_bswap32(nv50->stipple.stipple[i]));
}

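/* With NV50_SCISSORS_CLIPPING the scissor rectangles are also used to clip to
 * the viewport: each rectangle is intersected with the viewport extents (or
 * the framebuffer when the rasterizer scissor is disabled). Only viewports
 * and scissors flagged dirty are re-emitted.
 */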
static void
nv50_validate_scissor(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
#ifdef NV50_SCISSORS_CLIPPING
   int minx, maxx, miny, maxy, i;

   if (!(nv50->dirty &
         (NV50_NEW_SCISSOR | NV50_NEW_VIEWPORT | NV50_NEW_FRAMEBUFFER)) &&
       nv50->state.scissor == nv50->rast->pipe.scissor)
      return;

   if (nv50->state.scissor != nv50->rast->pipe.scissor)
      nv50->scissors_dirty = (1 << NV50_MAX_VIEWPORTS) - 1;

   nv50->state.scissor = nv50->rast->pipe.scissor;

   if ((nv50->dirty & NV50_NEW_FRAMEBUFFER) && !nv50->state.scissor)
      nv50->scissors_dirty = (1 << NV50_MAX_VIEWPORTS) - 1;

   for (i = 0; i < NV50_MAX_VIEWPORTS; i++) {
      struct pipe_scissor_state *s = &nv50->scissors[i];
      struct pipe_viewport_state *vp = &nv50->viewports[i];

      if (!(nv50->scissors_dirty & (1 << i)) &&
          !(nv50->viewports_dirty & (1 << i)))
         continue;

      if (nv50->state.scissor) {
         minx = s->minx;
         maxx = s->maxx;
         miny = s->miny;
         maxy = s->maxy;
      } else {
         minx = 0;
         maxx = nv50->framebuffer.width;
         miny = 0;
         maxy = nv50->framebuffer.height;
      }

      minx = MAX2(minx, (int)(vp->translate[0] - fabsf(vp->scale[0])));
      maxx = MIN2(maxx, (int)(vp->translate[0] + fabsf(vp->scale[0])));
      miny = MAX2(miny, (int)(vp->translate[1] - fabsf(vp->scale[1])));
      maxy = MIN2(maxy, (int)(vp->translate[1] + fabsf(vp->scale[1])));

      minx = MIN2(minx, 8192);
      maxx = MAX2(maxx, 0);
      miny = MIN2(miny, 8192);
      maxy = MAX2(maxy, 0);

      BEGIN_NV04(push, NV50_3D(SCISSOR_HORIZ(i)), 2);
      PUSH_DATA (push, (maxx << 16) | minx);
      PUSH_DATA (push, (maxy << 16) | miny);
#else
      BEGIN_NV04(push, NV50_3D(SCISSOR_HORIZ(i)), 2);
      PUSH_DATA (push, (s->maxx << 16) | s->minx);
      PUSH_DATA (push, (s->maxy << 16) | s->miny);
#endif
   }

   nv50->scissors_dirty = 0;
}

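/* Emit translate/scale for each dirty viewport. With scissor-based clipping
 * enabled, the depth range is derived from the Z translate/scale as well.
 */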
static void
nv50_validate_viewport(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   float zmin, zmax;
   int i;

   for (i = 0; i < NV50_MAX_VIEWPORTS; i++) {
      struct pipe_viewport_state *vpt = &nv50->viewports[i];

      if (!(nv50->viewports_dirty & (1 << i)))
         continue;

      BEGIN_NV04(push, NV50_3D(VIEWPORT_TRANSLATE_X(i)), 3);
      PUSH_DATAf(push, vpt->translate[0]);
      PUSH_DATAf(push, vpt->translate[1]);
      PUSH_DATAf(push, vpt->translate[2]);
      BEGIN_NV04(push, NV50_3D(VIEWPORT_SCALE_X(i)), 3);
      PUSH_DATAf(push, vpt->scale[0]);
      PUSH_DATAf(push, vpt->scale[1]);
      PUSH_DATAf(push, vpt->scale[2]);

      zmin = vpt->translate[2] - fabsf(vpt->scale[2]);
      zmax = vpt->translate[2] + fabsf(vpt->scale[2]);

#ifdef NV50_SCISSORS_CLIPPING
      BEGIN_NV04(push, NV50_3D(DEPTH_RANGE_NEAR(i)), 2);
      PUSH_DATAf(push, zmin);
      PUSH_DATAf(push, zmax);
#endif
   }

   nv50->viewports_dirty = 0;
}

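/* Make sure the vertex (or geometry) program has room for the enabled user
 * clip planes; if not, it is destroyed and rebuilt with the new clip-plane
 * count, and shader linkage is re-validated.
 */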
static inline void
nv50_check_program_ucps(struct nv50_context *nv50,
                        struct nv50_program *vp, uint8_t mask)
{
   const unsigned n = util_logbase2(mask) + 1;

   if (vp->vp.clpd_nr >= n)
      return;
   nv50_program_destroy(nv50, vp);

   vp->vp.clpd_nr = n;
   if (likely(vp == nv50->vertprog)) {
      nv50->dirty |= NV50_NEW_VERTPROG;
      nv50_vertprog_validate(nv50);
   } else {
      nv50->dirty |= NV50_NEW_GMTYPROG;
      nv50_gmtyprog_validate(nv50);
   }
   nv50_fp_linkage_validate(nv50);
}

/* alpha test is disabled if there are no color RTs, so make sure we have at
 * least one if alpha test is enabled. Note that this must run after
 * nv50_validate_fb, otherwise that will override the RT count setting.
 */
static void
nv50_validate_derived_2(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   if (nv50->zsa && nv50->zsa->pipe.alpha.enabled &&
       nv50->framebuffer.nr_cbufs == 0) {
      nv50_fb_set_null_rt(push, 0);
      BEGIN_NV04(push, NV50_3D(RT_CONTROL), 1);
      PUSH_DATA (push, (076543210 << 4) | 1);
   }
}

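/* Alpha-to-coverage and alpha-to-one only apply when the first color buffer
 * is not a pure integer format, so derive MULTISAMPLE_CTRL from both the
 * blend state and the framebuffer.
 */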
static void
nv50_validate_derived_3(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct pipe_framebuffer_state *fb = &nv50->framebuffer;
   uint32_t ms = 0;

   if ((!fb->nr_cbufs || !fb->cbufs[0] ||
        !util_format_is_pure_integer(fb->cbufs[0]->format)) && nv50->blend) {
      if (nv50->blend->pipe.alpha_to_coverage)
         ms |= NV50_3D_MULTISAMPLE_CTRL_ALPHA_TO_COVERAGE;
      if (nv50->blend->pipe.alpha_to_one)
         ms |= NV50_3D_MULTISAMPLE_CTRL_ALPHA_TO_ONE;
   }

   BEGIN_NV04(push, NV50_3D(MULTISAMPLE_CTRL), 1);
   PUSH_DATA (push, ms);
}

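/* Upload user clip planes to the AUX constant buffer when they changed, and
 * enable the clip distances selected by the rasterizer state. The clip-plane
 * count of the last vertex-stage program (geometry if present, else vertex)
 * is checked and the program rebuilt if necessary.
 */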
static void
nv50_validate_clip(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   struct nv50_program *vp;
   uint8_t clip_enable;

   if (nv50->dirty & NV50_NEW_CLIP) {
      BEGIN_NV04(push, NV50_3D(CB_ADDR), 1);
      PUSH_DATA (push, (NV50_CB_AUX_UCP_OFFSET << 8) | NV50_CB_AUX);
      BEGIN_NI04(push, NV50_3D(CB_DATA(0)), PIPE_MAX_CLIP_PLANES * 4);
      PUSH_DATAp(push, &nv50->clip.ucp[0][0], PIPE_MAX_CLIP_PLANES * 4);
   }

   vp = nv50->gmtyprog;
   if (likely(!vp))
      vp = nv50->vertprog;

   clip_enable = nv50->rast->pipe.clip_plane_enable;

   BEGIN_NV04(push, NV50_3D(CLIP_DISTANCE_ENABLE), 1);
   PUSH_DATA (push, clip_enable);

   if (clip_enable)
      nv50_check_program_ucps(nv50, vp, clip_enable);
}

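/* The blend, zsa and rasterizer CSOs are pre-encoded command streams that are
 * simply copied into the push buffer.
 */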
static void
nv50_validate_blend(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->blend->size);
   PUSH_DATAp(push, nv50->blend->state, nv50->blend->size);
}

static void
nv50_validate_zsa(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->zsa->size);
   PUSH_DATAp(push, nv50->zsa->state, nv50->zsa->size);
}

static void
nv50_validate_rasterizer(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   PUSH_SPACE(push, nv50->rast->size);
   PUSH_DATAp(push, nv50->rast->state, nv50->rast->size);
}

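/* The same 16-bit sample mask is replicated into all four MSAA_MASK words. */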
static void
nv50_validate_sample_mask(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;

   unsigned mask[4] =
   {
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff,
      nv50->sample_mask & 0xffff
   };

   BEGIN_NV04(push, NV50_3D(MSAA_MASK(0)), 4);
   PUSH_DATA (push, mask[0]);
   PUSH_DATA (push, mask[1]);
   PUSH_DATA (push, mask[2]);
   PUSH_DATA (push, mask[3]);
}

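/* Per-sample shading (minimum sample count) is only available on NVA3 and
 * later; the requested minimum is rounded up to a power of two.
 */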
static void
nv50_validate_min_samples(struct nv50_context *nv50)
{
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   int samples;

   if (nv50->screen->tesla->oclass < NVA3_3D_CLASS)
      return;

   samples = util_next_power_of_two(nv50->min_samples);
   if (samples > 1)
      samples |= NVA3_3D_SAMPLE_SHADING_ENABLE;

   BEGIN_NV04(push, SUBC_3D(NVA3_3D_SAMPLE_SHADING), 1);
   PUSH_DATA (push, samples);
}

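/* Called when a different context takes over the hardware channel: inherit
 * the previous context's (or the screen's saved) hardware state record, mark
 * everything dirty, and drop dirty bits for state objects this context does
 * not have bound.
 */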
static void
nv50_switch_pipe_context(struct nv50_context *ctx_to)
{
   struct nv50_context *ctx_from = ctx_to->screen->cur_ctx;

   if (ctx_from)
      ctx_to->state = ctx_from->state;
   else
      ctx_to->state = ctx_to->screen->save_state;

   ctx_to->dirty = ~0;
   ctx_to->viewports_dirty = ~0;
   ctx_to->scissors_dirty = ~0;

   ctx_to->constbuf_dirty[0] =
   ctx_to->constbuf_dirty[1] =
   ctx_to->constbuf_dirty[2] = (1 << NV50_MAX_PIPE_CONSTBUFS) - 1;

   if (!ctx_to->vertex)
      ctx_to->dirty &= ~(NV50_NEW_VERTEX | NV50_NEW_ARRAYS);

   if (!ctx_to->vertprog)
      ctx_to->dirty &= ~NV50_NEW_VERTPROG;
   if (!ctx_to->fragprog)
      ctx_to->dirty &= ~NV50_NEW_FRAGPROG;

   if (!ctx_to->blend)
      ctx_to->dirty &= ~NV50_NEW_BLEND;
   if (!ctx_to->rast)
#ifdef NV50_SCISSORS_CLIPPING
      ctx_to->dirty &= ~(NV50_NEW_RASTERIZER | NV50_NEW_SCISSOR);
#else
      ctx_to->dirty &= ~NV50_NEW_RASTERIZER;
#endif
   if (!ctx_to->zsa)
      ctx_to->dirty &= ~NV50_NEW_ZSA;

   ctx_to->screen->cur_ctx = ctx_to;
}

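/* Table of validation functions and the dirty-state bits that trigger them;
 * nv50_state_validate() walks this list in order.
 */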
static struct state_validate {
   void (*func)(struct nv50_context *);
   uint32_t states;
} validate_list[] = {
   { nv50_validate_fb,            NV50_NEW_FRAMEBUFFER },
   { nv50_validate_blend,         NV50_NEW_BLEND },
   { nv50_validate_zsa,           NV50_NEW_ZSA },
   { nv50_validate_sample_mask,   NV50_NEW_SAMPLE_MASK },
   { nv50_validate_rasterizer,    NV50_NEW_RASTERIZER },
   { nv50_validate_blend_colour,  NV50_NEW_BLEND_COLOUR },
   { nv50_validate_stencil_ref,   NV50_NEW_STENCIL_REF },
   { nv50_validate_stipple,       NV50_NEW_STIPPLE },
#ifdef NV50_SCISSORS_CLIPPING
   { nv50_validate_scissor,       NV50_NEW_SCISSOR | NV50_NEW_VIEWPORT |
                                  NV50_NEW_RASTERIZER |
                                  NV50_NEW_FRAMEBUFFER },
#else
   { nv50_validate_scissor,       NV50_NEW_SCISSOR },
#endif
   { nv50_validate_viewport,      NV50_NEW_VIEWPORT },
   { nv50_vertprog_validate,      NV50_NEW_VERTPROG },
   { nv50_gmtyprog_validate,      NV50_NEW_GMTYPROG },
   { nv50_fragprog_validate,      NV50_NEW_FRAGPROG | NV50_NEW_RASTERIZER |
                                  NV50_NEW_MIN_SAMPLES },
   { nv50_fp_linkage_validate,    NV50_NEW_FRAGPROG | NV50_NEW_VERTPROG |
                                  NV50_NEW_GMTYPROG | NV50_NEW_RASTERIZER },
   { nv50_gp_linkage_validate,    NV50_NEW_GMTYPROG | NV50_NEW_VERTPROG },
   { nv50_validate_derived_rs,    NV50_NEW_FRAGPROG | NV50_NEW_RASTERIZER |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_validate_derived_2,     NV50_NEW_ZSA | NV50_NEW_FRAMEBUFFER },
   { nv50_validate_derived_3,     NV50_NEW_BLEND | NV50_NEW_FRAMEBUFFER },
   { nv50_validate_clip,          NV50_NEW_CLIP | NV50_NEW_RASTERIZER |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_constbufs_validate,     NV50_NEW_CONSTBUF },
   { nv50_validate_textures,      NV50_NEW_TEXTURES },
   { nv50_validate_samplers,      NV50_NEW_SAMPLERS },
   { nv50_stream_output_validate, NV50_NEW_STRMOUT |
                                  NV50_NEW_VERTPROG | NV50_NEW_GMTYPROG },
   { nv50_vertex_arrays_validate, NV50_NEW_VERTEX | NV50_NEW_ARRAYS |
                                  NV50_NEW_VERTPROG },
   { nv50_validate_min_samples,   NV50_NEW_MIN_SAMPLES },
};
#define validate_list_len (sizeof(validate_list) / sizeof(validate_list[0]))

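/* Run every validation function whose state bits intersect the dirty mask,
 * emit a serialization command if a bound render target was previously being
 * read, fence the 3D buffers and (re)validate the push buffer. Returns true
 * on success.
 */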
bool
nv50_state_validate(struct nv50_context *nv50, uint32_t mask, unsigned words)
{
   uint32_t state_mask;
   int ret;
   unsigned i;

   if (nv50->screen->cur_ctx != nv50)
      nv50_switch_pipe_context(nv50);

   state_mask = nv50->dirty & mask;

   if (state_mask) {
      for (i = 0; i < validate_list_len; ++i) {
         struct state_validate *validate = &validate_list[i];

         if (state_mask & validate->states)
            validate->func(nv50);
      }
      nv50->dirty &= ~state_mask;

      if (nv50->state.rt_serialize) {
         nv50->state.rt_serialize = false;
         BEGIN_NV04(nv50->base.pushbuf, SUBC_3D(NV50_GRAPH_SERIALIZE), 1);
         PUSH_DATA (nv50->base.pushbuf, 0);
      }

      nv50_bufctx_fence(nv50->bufctx_3d, false);
   }
   nouveau_pushbuf_bufctx(nv50->base.pushbuf, nv50->bufctx_3d);
   ret = nouveau_pushbuf_validate(nv50->base.pushbuf);

   if (unlikely(nv50->state.flushed)) {
      nv50->state.flushed = false;
      nv50_bufctx_fence(nv50->bufctx_3d, true);
   }
   return !ret;
}