2 #include "util/u_format.h"
3 #include "util/u_math.h"
5 #include "nvc0/nvc0_context.h"
9 nvc0_validate_zcull(struct nvc0_context
*nvc0
)
11 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
12 struct pipe_framebuffer_state
*fb
= &nvc0
->framebuffer
;
13 struct nv50_surface
*sf
= nv50_surface(fb
->zsbuf
);
14 struct nv50_miptree
*mt
= nv50_miptree(sf
->base
.texture
);
15 struct nouveau_bo
*bo
= mt
->base
.bo
;
17 uint32_t offset
= align(mt
->total_size
, 1 << 17);
18 unsigned width
, height
;
20 assert(mt
->base
.base
.depth0
== 1 && mt
->base
.base
.array_size
< 2);
22 size
= mt
->total_size
* 2;
24 height
= align(fb
->height
, 32);
25 width
= fb
->width
% 224;
27 width
= fb
->width
+ (224 - width
);
31 BEGIN_NVC0(push
, NVC0_3D(ZCULL_REGION
), 1);
33 BEGIN_NVC0(push
, NVC0_3D(ZCULL_ADDRESS_HIGH
), 2);
34 PUSH_DATAh(push
, bo
->offset
+ offset
);
35 PUSH_DATA (push
, bo
->offset
+ offset
);
37 BEGIN_NVC0(push
, NVC0_3D(ZCULL_LIMIT_HIGH
), 2);
38 PUSH_DATAh(push
, bo
->offset
+ offset
);
39 PUSH_DATA (push
, bo
->offset
+ offset
);
40 BEGIN_NVC0(push
, SUBC_3D(0x07e0), 2);
41 PUSH_DATA (push
, size
);
42 PUSH_DATA (push
, size
>> 16);
43 BEGIN_NVC0(push
, SUBC_3D(0x15c8), 1); /* bits 0x3 */
45 BEGIN_NVC0(push
, NVC0_3D(ZCULL_WIDTH
), 4);
46 PUSH_DATA (push
, width
);
47 PUSH_DATA (push
, height
);
50 BEGIN_NVC0(push
, NVC0_3D(ZCULL_WINDOW_OFFSET_X
), 2);
53 BEGIN_NVC0(push
, NVC0_3D(ZCULL_INVALIDATE
), 1);
/* Program render target slot @i as a null (unbound) RT: zero address,
 * minimal dummy size so the hardware has something valid to point at.
 * NOTE(review): the six data words were reconstructed from context —
 * only the method header was visible; verify values against upstream.
 */
static void
nvc0_fb_set_null_rt(struct nouveau_pushbuf *push, unsigned i)
{
   BEGIN_NVC0(push, NVC0_3D(RT_ADDRESS_HIGH(i)), 6);
   PUSH_DATA (push, 0);   /* address high */
   PUSH_DATA (push, 0);   /* address low */
   PUSH_DATA (push, 64);  /* width */
   PUSH_DATA (push, 0);   /* height */
   PUSH_DATA (push, 0);   /* format */
   PUSH_DATA (push, 0);   /* tile mode */
}
71 nvc0_validate_fb(struct nvc0_context
*nvc0
)
73 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
74 struct pipe_framebuffer_state
*fb
= &nvc0
->framebuffer
;
75 struct nvc0_screen
*screen
= nvc0
->screen
;
77 unsigned ms_mode
= NVC0_3D_MULTISAMPLE_MODE_MS1
;
78 bool serialize
= false;
80 nouveau_bufctx_reset(nvc0
->bufctx_3d
, NVC0_BIND_3D_FB
);
82 BEGIN_NVC0(push
, NVC0_3D(RT_CONTROL
), 1);
83 PUSH_DATA (push
, (076543210 << 4) | fb
->nr_cbufs
);
84 BEGIN_NVC0(push
, NVC0_3D(SCREEN_SCISSOR_HORIZ
), 2);
85 PUSH_DATA (push
, fb
->width
<< 16);
86 PUSH_DATA (push
, fb
->height
<< 16);
88 for (i
= 0; i
< fb
->nr_cbufs
; ++i
) {
89 struct nv50_surface
*sf
;
90 struct nv04_resource
*res
;
91 struct nouveau_bo
*bo
;
94 nvc0_fb_set_null_rt(push
, i
);
98 sf
= nv50_surface(fb
->cbufs
[i
]);
99 res
= nv04_resource(sf
->base
.texture
);
102 BEGIN_NVC0(push
, NVC0_3D(RT_ADDRESS_HIGH(i
)), 9);
103 PUSH_DATAh(push
, res
->address
+ sf
->offset
);
104 PUSH_DATA (push
, res
->address
+ sf
->offset
);
105 if (likely(nouveau_bo_memtype(bo
))) {
106 struct nv50_miptree
*mt
= nv50_miptree(sf
->base
.texture
);
108 assert(sf
->base
.texture
->target
!= PIPE_BUFFER
);
110 PUSH_DATA(push
, sf
->width
);
111 PUSH_DATA(push
, sf
->height
);
112 PUSH_DATA(push
, nvc0_format_table
[sf
->base
.format
].rt
);
113 PUSH_DATA(push
, (mt
->layout_3d
<< 16) |
114 mt
->level
[sf
->base
.u
.tex
.level
].tile_mode
);
115 PUSH_DATA(push
, sf
->base
.u
.tex
.first_layer
+ sf
->depth
);
116 PUSH_DATA(push
, mt
->layer_stride
>> 2);
117 PUSH_DATA(push
, sf
->base
.u
.tex
.first_layer
);
119 ms_mode
= mt
->ms_mode
;
121 if (res
->base
.target
== PIPE_BUFFER
) {
122 PUSH_DATA(push
, 262144);
125 PUSH_DATA(push
, nv50_miptree(sf
->base
.texture
)->level
[0].pitch
);
126 PUSH_DATA(push
, sf
->height
);
128 PUSH_DATA(push
, nvc0_format_table
[sf
->base
.format
].rt
);
129 PUSH_DATA(push
, 1 << 12);
134 nvc0_resource_fence(res
, NOUVEAU_BO_WR
);
139 if (res
->status
& NOUVEAU_BUFFER_STATUS_GPU_READING
)
141 res
->status
|= NOUVEAU_BUFFER_STATUS_GPU_WRITING
;
142 res
->status
&= ~NOUVEAU_BUFFER_STATUS_GPU_READING
;
144 /* only register for writing, otherwise we'd always serialize here */
145 BCTX_REFN(nvc0
->bufctx_3d
, 3D_FB
, res
, WR
);
149 struct nv50_miptree
*mt
= nv50_miptree(fb
->zsbuf
->texture
);
150 struct nv50_surface
*sf
= nv50_surface(fb
->zsbuf
);
151 int unk
= mt
->base
.base
.target
== PIPE_TEXTURE_2D
;
153 BEGIN_NVC0(push
, NVC0_3D(ZETA_ADDRESS_HIGH
), 5);
154 PUSH_DATAh(push
, mt
->base
.address
+ sf
->offset
);
155 PUSH_DATA (push
, mt
->base
.address
+ sf
->offset
);
156 PUSH_DATA (push
, nvc0_format_table
[fb
->zsbuf
->format
].rt
);
157 PUSH_DATA (push
, mt
->level
[sf
->base
.u
.tex
.level
].tile_mode
);
158 PUSH_DATA (push
, mt
->layer_stride
>> 2);
159 BEGIN_NVC0(push
, NVC0_3D(ZETA_ENABLE
), 1);
161 BEGIN_NVC0(push
, NVC0_3D(ZETA_HORIZ
), 3);
162 PUSH_DATA (push
, sf
->width
);
163 PUSH_DATA (push
, sf
->height
);
164 PUSH_DATA (push
, (unk
<< 16) |
165 (sf
->base
.u
.tex
.first_layer
+ sf
->depth
));
166 BEGIN_NVC0(push
, NVC0_3D(ZETA_BASE_LAYER
), 1);
167 PUSH_DATA (push
, sf
->base
.u
.tex
.first_layer
);
169 ms_mode
= mt
->ms_mode
;
171 if (mt
->base
.status
& NOUVEAU_BUFFER_STATUS_GPU_READING
)
173 mt
->base
.status
|= NOUVEAU_BUFFER_STATUS_GPU_WRITING
;
174 mt
->base
.status
&= ~NOUVEAU_BUFFER_STATUS_GPU_READING
;
176 BCTX_REFN(nvc0
->bufctx_3d
, 3D_FB
, &mt
->base
, WR
);
178 BEGIN_NVC0(push
, NVC0_3D(ZETA_ENABLE
), 1);
182 IMMED_NVC0(push
, NVC0_3D(MULTISAMPLE_MODE
), ms_mode
);
185 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
186 PUSH_DATA (push
, 1024);
187 PUSH_DATAh(push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(4));
188 PUSH_DATA (push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(4));
189 BEGIN_1IC0(push
, NVC0_3D(CB_POS
), 1 + 2 * ms
);
190 PUSH_DATA (push
, NVC0_CB_AUX_SAMPLE_INFO
);
191 for (i
= 0; i
< ms
; i
++) {
193 nvc0
->base
.pipe
.get_sample_position(&nvc0
->base
.pipe
, ms
, i
, xy
);
194 PUSH_DATAf(push
, xy
[0]);
195 PUSH_DATAf(push
, xy
[1]);
199 IMMED_NVC0(push
, NVC0_3D(SERIALIZE
), 0);
201 NOUVEAU_DRV_STAT(&nvc0
->screen
->base
, gpu_serialize_count
, serialize
);
205 nvc0_validate_blend_colour(struct nvc0_context
*nvc0
)
207 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
209 BEGIN_NVC0(push
, NVC0_3D(BLEND_COLOR(0)), 4);
210 PUSH_DATAf(push
, nvc0
->blend_colour
.color
[0]);
211 PUSH_DATAf(push
, nvc0
->blend_colour
.color
[1]);
212 PUSH_DATAf(push
, nvc0
->blend_colour
.color
[2]);
213 PUSH_DATAf(push
, nvc0
->blend_colour
.color
[3]);
217 nvc0_validate_stencil_ref(struct nvc0_context
*nvc0
)
219 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
220 const ubyte
*ref
= &nvc0
->stencil_ref
.ref_value
[0];
222 IMMED_NVC0(push
, NVC0_3D(STENCIL_FRONT_FUNC_REF
), ref
[0]);
223 IMMED_NVC0(push
, NVC0_3D(STENCIL_BACK_FUNC_REF
), ref
[1]);
227 nvc0_validate_stipple(struct nvc0_context
*nvc0
)
229 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
232 BEGIN_NVC0(push
, NVC0_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
233 for (i
= 0; i
< 32; ++i
)
234 PUSH_DATA(push
, util_bswap32(nvc0
->stipple
.stipple
[i
]));
238 nvc0_validate_scissor(struct nvc0_context
*nvc0
)
241 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
243 if (!(nvc0
->dirty_3d
& NVC0_NEW_3D_SCISSOR
) &&
244 nvc0
->rast
->pipe
.scissor
== nvc0
->state
.scissor
)
247 if (nvc0
->state
.scissor
!= nvc0
->rast
->pipe
.scissor
)
248 nvc0
->scissors_dirty
= (1 << NVC0_MAX_VIEWPORTS
) - 1;
250 nvc0
->state
.scissor
= nvc0
->rast
->pipe
.scissor
;
252 for (i
= 0; i
< NVC0_MAX_VIEWPORTS
; i
++) {
253 struct pipe_scissor_state
*s
= &nvc0
->scissors
[i
];
254 if (!(nvc0
->scissors_dirty
& (1 << i
)))
257 BEGIN_NVC0(push
, NVC0_3D(SCISSOR_HORIZ(i
)), 2);
258 if (nvc0
->rast
->pipe
.scissor
) {
259 PUSH_DATA(push
, (s
->maxx
<< 16) | s
->minx
);
260 PUSH_DATA(push
, (s
->maxy
<< 16) | s
->miny
);
262 PUSH_DATA(push
, (0xffff << 16) | 0);
263 PUSH_DATA(push
, (0xffff << 16) | 0);
266 nvc0
->scissors_dirty
= 0;
270 nvc0_validate_viewport(struct nvc0_context
*nvc0
)
272 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
276 for (i
= 0; i
< NVC0_MAX_VIEWPORTS
; i
++) {
277 struct pipe_viewport_state
*vp
= &nvc0
->viewports
[i
];
279 if (!(nvc0
->viewports_dirty
& (1 << i
)))
282 BEGIN_NVC0(push
, NVC0_3D(VIEWPORT_TRANSLATE_X(i
)), 3);
283 PUSH_DATAf(push
, vp
->translate
[0]);
284 PUSH_DATAf(push
, vp
->translate
[1]);
285 PUSH_DATAf(push
, vp
->translate
[2]);
287 BEGIN_NVC0(push
, NVC0_3D(VIEWPORT_SCALE_X(i
)), 3);
288 PUSH_DATAf(push
, vp
->scale
[0]);
289 PUSH_DATAf(push
, vp
->scale
[1]);
290 PUSH_DATAf(push
, vp
->scale
[2]);
292 /* now set the viewport rectangle to viewport dimensions for clipping */
294 x
= util_iround(MAX2(0.0f
, vp
->translate
[0] - fabsf(vp
->scale
[0])));
295 y
= util_iround(MAX2(0.0f
, vp
->translate
[1] - fabsf(vp
->scale
[1])));
296 w
= util_iround(vp
->translate
[0] + fabsf(vp
->scale
[0])) - x
;
297 h
= util_iround(vp
->translate
[1] + fabsf(vp
->scale
[1])) - y
;
299 BEGIN_NVC0(push
, NVC0_3D(VIEWPORT_HORIZ(i
)), 2);
300 PUSH_DATA (push
, (w
<< 16) | x
);
301 PUSH_DATA (push
, (h
<< 16) | y
);
303 zmin
= vp
->translate
[2] - fabsf(vp
->scale
[2]);
304 zmax
= vp
->translate
[2] + fabsf(vp
->scale
[2]);
306 BEGIN_NVC0(push
, NVC0_3D(DEPTH_RANGE_NEAR(i
)), 2);
307 PUSH_DATAf(push
, zmin
);
308 PUSH_DATAf(push
, zmax
);
310 nvc0
->viewports_dirty
= 0;
314 nvc0_upload_uclip_planes(struct nvc0_context
*nvc0
, unsigned s
)
316 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
317 struct nvc0_screen
*screen
= nvc0
->screen
;
319 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
320 PUSH_DATA (push
, 1024);
321 PUSH_DATAh(push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(s
));
322 PUSH_DATA (push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(s
));
323 BEGIN_1IC0(push
, NVC0_3D(CB_POS
), PIPE_MAX_CLIP_PLANES
* 4 + 1);
324 PUSH_DATA (push
, NVC0_CB_AUX_UCP_INFO
);
325 PUSH_DATAp(push
, &nvc0
->clip
.ucp
[0][0], PIPE_MAX_CLIP_PLANES
* 4);
329 nvc0_check_program_ucps(struct nvc0_context
*nvc0
,
330 struct nvc0_program
*vp
, uint8_t mask
)
332 const unsigned n
= util_logbase2(mask
) + 1;
334 if (vp
->vp
.num_ucps
>= n
)
336 nvc0_program_destroy(nvc0
, vp
);
339 if (likely(vp
== nvc0
->vertprog
))
340 nvc0_vertprog_validate(nvc0
);
342 if (likely(vp
== nvc0
->gmtyprog
))
343 nvc0_gmtyprog_validate(nvc0
);
345 nvc0_tevlprog_validate(nvc0
);
349 nvc0_validate_clip(struct nvc0_context
*nvc0
)
351 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
352 struct nvc0_program
*vp
;
354 uint8_t clip_enable
= nvc0
->rast
->pipe
.clip_plane_enable
;
356 if (nvc0
->gmtyprog
) {
360 if (nvc0
->tevlprog
) {
368 if (clip_enable
&& vp
->vp
.num_ucps
< PIPE_MAX_CLIP_PLANES
)
369 nvc0_check_program_ucps(nvc0
, vp
, clip_enable
);
371 if (nvc0
->dirty_3d
& (NVC0_NEW_3D_CLIP
| (NVC0_NEW_3D_VERTPROG
<< stage
)))
372 if (vp
->vp
.num_ucps
> 0 && vp
->vp
.num_ucps
<= PIPE_MAX_CLIP_PLANES
)
373 nvc0_upload_uclip_planes(nvc0
, stage
);
375 clip_enable
&= vp
->vp
.clip_enable
;
377 if (nvc0
->state
.clip_enable
!= clip_enable
) {
378 nvc0
->state
.clip_enable
= clip_enable
;
379 IMMED_NVC0(push
, NVC0_3D(CLIP_DISTANCE_ENABLE
), clip_enable
);
381 if (nvc0
->state
.clip_mode
!= vp
->vp
.clip_mode
) {
382 nvc0
->state
.clip_mode
= vp
->vp
.clip_mode
;
383 BEGIN_NVC0(push
, NVC0_3D(CLIP_DISTANCE_MODE
), 1);
384 PUSH_DATA (push
, vp
->vp
.clip_mode
);
389 nvc0_validate_blend(struct nvc0_context
*nvc0
)
391 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
393 PUSH_SPACE(push
, nvc0
->blend
->size
);
394 PUSH_DATAp(push
, nvc0
->blend
->state
, nvc0
->blend
->size
);
398 nvc0_validate_zsa(struct nvc0_context
*nvc0
)
400 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
402 PUSH_SPACE(push
, nvc0
->zsa
->size
);
403 PUSH_DATAp(push
, nvc0
->zsa
->state
, nvc0
->zsa
->size
);
407 nvc0_validate_rasterizer(struct nvc0_context
*nvc0
)
409 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
411 PUSH_SPACE(push
, nvc0
->rast
->size
);
412 PUSH_DATAp(push
, nvc0
->rast
->state
, nvc0
->rast
->size
);
416 nvc0_constbufs_validate(struct nvc0_context
*nvc0
)
418 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
421 for (s
= 0; s
< 5; ++s
) {
422 while (nvc0
->constbuf_dirty
[s
]) {
423 int i
= ffs(nvc0
->constbuf_dirty
[s
]) - 1;
424 nvc0
->constbuf_dirty
[s
] &= ~(1 << i
);
426 if (nvc0
->constbuf
[s
][i
].user
) {
427 struct nouveau_bo
*bo
= nvc0
->screen
->uniform_bo
;
428 const unsigned base
= NVC0_CB_USR_INFO(s
);
429 const unsigned size
= nvc0
->constbuf
[s
][0].size
;
430 assert(i
== 0); /* we really only want OpenGL uniforms here */
431 assert(nvc0
->constbuf
[s
][0].u
.data
);
433 if (nvc0
->state
.uniform_buffer_bound
[s
] < size
) {
434 nvc0
->state
.uniform_buffer_bound
[s
] = align(size
, 0x100);
436 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
437 PUSH_DATA (push
, nvc0
->state
.uniform_buffer_bound
[s
]);
438 PUSH_DATAh(push
, bo
->offset
+ base
);
439 PUSH_DATA (push
, bo
->offset
+ base
);
440 BEGIN_NVC0(push
, NVC0_3D(CB_BIND(s
)), 1);
441 PUSH_DATA (push
, (0 << 4) | 1);
443 nvc0_cb_bo_push(&nvc0
->base
, bo
, NV_VRAM_DOMAIN(&nvc0
->screen
->base
),
444 base
, nvc0
->state
.uniform_buffer_bound
[s
],
446 nvc0
->constbuf
[s
][0].u
.data
);
448 struct nv04_resource
*res
=
449 nv04_resource(nvc0
->constbuf
[s
][i
].u
.buf
);
451 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
452 PUSH_DATA (push
, nvc0
->constbuf
[s
][i
].size
);
453 PUSH_DATAh(push
, res
->address
+ nvc0
->constbuf
[s
][i
].offset
);
454 PUSH_DATA (push
, res
->address
+ nvc0
->constbuf
[s
][i
].offset
);
455 BEGIN_NVC0(push
, NVC0_3D(CB_BIND(s
)), 1);
456 PUSH_DATA (push
, (i
<< 4) | 1);
458 BCTX_REFN(nvc0
->bufctx_3d
, 3D_CB(s
, i
), res
, RD
);
460 nvc0
->cb_dirty
= 1; /* Force cache flush for UBO. */
461 res
->cb_bindings
[s
] |= 1 << i
;
463 BEGIN_NVC0(push
, NVC0_3D(CB_BIND(s
)), 1);
464 PUSH_DATA (push
, (i
<< 4) | 0);
467 nvc0
->state
.uniform_buffer_bound
[s
] = 0;
472 /* Invalidate all COMPUTE constbufs because they are aliased with 3D. */
473 nvc0
->dirty_cp
|= NVC0_NEW_CP_CONSTBUF
;
474 nvc0
->constbuf_dirty
[5] |= nvc0
->constbuf_valid
[5];
475 nvc0
->state
.uniform_buffer_bound
[5] = 0;
479 nvc0_validate_buffers(struct nvc0_context
*nvc0
)
481 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
482 struct nvc0_screen
*screen
= nvc0
->screen
;
485 for (s
= 0; s
< 5; s
++) {
486 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
487 PUSH_DATA (push
, 1024);
488 PUSH_DATAh(push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(s
));
489 PUSH_DATA (push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(s
));
490 BEGIN_1IC0(push
, NVC0_3D(CB_POS
), 1 + 4 * NVC0_MAX_BUFFERS
);
491 PUSH_DATA (push
, NVC0_CB_AUX_BUF_INFO(0));
492 for (i
= 0; i
< NVC0_MAX_BUFFERS
; i
++) {
493 if (nvc0
->buffers
[s
][i
].buffer
) {
494 struct nv04_resource
*res
=
495 nv04_resource(nvc0
->buffers
[s
][i
].buffer
);
496 PUSH_DATA (push
, res
->address
+ nvc0
->buffers
[s
][i
].buffer_offset
);
497 PUSH_DATAh(push
, res
->address
+ nvc0
->buffers
[s
][i
].buffer_offset
);
498 PUSH_DATA (push
, nvc0
->buffers
[s
][i
].buffer_size
);
500 BCTX_REFN(nvc0
->bufctx_3d
, 3D_BUF
, res
, RDWR
);
513 nvc0_validate_sample_mask(struct nvc0_context
*nvc0
)
515 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
519 nvc0
->sample_mask
& 0xffff,
520 nvc0
->sample_mask
& 0xffff,
521 nvc0
->sample_mask
& 0xffff,
522 nvc0
->sample_mask
& 0xffff
525 BEGIN_NVC0(push
, NVC0_3D(MSAA_MASK(0)), 4);
526 PUSH_DATA (push
, mask
[0]);
527 PUSH_DATA (push
, mask
[1]);
528 PUSH_DATA (push
, mask
[2]);
529 PUSH_DATA (push
, mask
[3]);
533 nvc0_validate_min_samples(struct nvc0_context
*nvc0
)
535 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
538 samples
= util_next_power_of_two(nvc0
->min_samples
);
540 samples
|= NVC0_3D_SAMPLE_SHADING_ENABLE
;
542 IMMED_NVC0(push
, NVC0_3D(SAMPLE_SHADING
), samples
);
546 nvc0_validate_driverconst(struct nvc0_context
*nvc0
)
548 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
549 struct nvc0_screen
*screen
= nvc0
->screen
;
552 for (i
= 0; i
< 5; ++i
) {
553 BEGIN_NVC0(push
, NVC0_3D(CB_SIZE
), 3);
554 PUSH_DATA (push
, 1024);
555 PUSH_DATAh(push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(i
));
556 PUSH_DATA (push
, screen
->uniform_bo
->offset
+ NVC0_CB_AUX_INFO(i
));
557 BEGIN_NVC0(push
, NVC0_3D(CB_BIND(i
)), 1);
558 PUSH_DATA (push
, (15 << 4) | 1);
561 nvc0
->dirty_cp
|= NVC0_NEW_CP_DRIVERCONST
;
565 nvc0_validate_derived_1(struct nvc0_context
*nvc0
)
567 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
568 bool rasterizer_discard
;
570 if (nvc0
->rast
&& nvc0
->rast
->pipe
.rasterizer_discard
) {
571 rasterizer_discard
= true;
573 bool zs
= nvc0
->zsa
&&
574 (nvc0
->zsa
->pipe
.depth
.enabled
|| nvc0
->zsa
->pipe
.stencil
[0].enabled
);
575 rasterizer_discard
= !zs
&&
576 (!nvc0
->fragprog
|| !nvc0
->fragprog
->hdr
[18]);
579 if (rasterizer_discard
!= nvc0
->state
.rasterizer_discard
) {
580 nvc0
->state
.rasterizer_discard
= rasterizer_discard
;
581 IMMED_NVC0(push
, NVC0_3D(RASTERIZE_ENABLE
), !rasterizer_discard
);
585 /* alpha test is disabled if there are no color RTs, so make sure we have at
586 * least one if alpha test is enabled. Note that this must run after
587 * nvc0_validate_fb, otherwise that will override the RT count setting.
590 nvc0_validate_derived_2(struct nvc0_context
*nvc0
)
592 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
594 if (nvc0
->zsa
&& nvc0
->zsa
->pipe
.alpha
.enabled
&&
595 nvc0
->framebuffer
.nr_cbufs
== 0) {
596 nvc0_fb_set_null_rt(push
, 0);
597 BEGIN_NVC0(push
, NVC0_3D(RT_CONTROL
), 1);
598 PUSH_DATA (push
, (076543210 << 4) | 1);
603 nvc0_validate_derived_3(struct nvc0_context
*nvc0
)
605 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
606 struct pipe_framebuffer_state
*fb
= &nvc0
->framebuffer
;
609 if ((!fb
->nr_cbufs
|| !fb
->cbufs
[0] ||
610 !util_format_is_pure_integer(fb
->cbufs
[0]->format
)) && nvc0
->blend
) {
611 if (nvc0
->blend
->pipe
.alpha_to_coverage
)
612 ms
|= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_COVERAGE
;
613 if (nvc0
->blend
->pipe
.alpha_to_one
)
614 ms
|= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_ONE
;
617 BEGIN_NVC0(push
, NVC0_3D(MULTISAMPLE_CTRL
), 1);
618 PUSH_DATA (push
, ms
);
622 nvc0_validate_tess_state(struct nvc0_context
*nvc0
)
624 struct nouveau_pushbuf
*push
= nvc0
->base
.pushbuf
;
626 BEGIN_NVC0(push
, NVC0_3D(TESS_LEVEL_OUTER(0)), 6);
627 PUSH_DATAp(push
, nvc0
->default_tess_outer
, 4);
628 PUSH_DATAp(push
, nvc0
->default_tess_inner
, 2);
632 nvc0_switch_pipe_context(struct nvc0_context
*ctx_to
)
634 struct nvc0_context
*ctx_from
= ctx_to
->screen
->cur_ctx
;
638 ctx_to
->state
= ctx_from
->state
;
640 ctx_to
->state
= ctx_to
->screen
->save_state
;
642 ctx_to
->dirty_3d
= ~0;
643 ctx_to
->dirty_cp
= ~0;
644 ctx_to
->viewports_dirty
= ~0;
645 ctx_to
->scissors_dirty
= ~0;
647 for (s
= 0; s
< 6; ++s
) {
648 ctx_to
->samplers_dirty
[s
] = ~0;
649 ctx_to
->textures_dirty
[s
] = ~0;
650 ctx_to
->constbuf_dirty
[s
] = (1 << NVC0_MAX_PIPE_CONSTBUFS
) - 1;
651 ctx_to
->buffers_dirty
[s
] = ~0;
654 /* Reset tfb as the shader that owns it may have been deleted. */
655 ctx_to
->state
.tfb
= NULL
;
658 ctx_to
->dirty_3d
&= ~(NVC0_NEW_3D_VERTEX
| NVC0_NEW_3D_ARRAYS
);
659 if (!ctx_to
->idxbuf
.buffer
)
660 ctx_to
->dirty_3d
&= ~NVC0_NEW_3D_IDXBUF
;
662 if (!ctx_to
->vertprog
)
663 ctx_to
->dirty_3d
&= ~NVC0_NEW_3D_VERTPROG
;
664 if (!ctx_to
->fragprog
)
665 ctx_to
->dirty_3d
&= ~NVC0_NEW_3D_FRAGPROG
;
668 ctx_to
->dirty_3d
&= ~NVC0_NEW_3D_BLEND
;
670 ctx_to
->dirty_3d
&= ~(NVC0_NEW_3D_RASTERIZER
| NVC0_NEW_3D_SCISSOR
);
672 ctx_to
->dirty_3d
&= ~NVC0_NEW_3D_ZSA
;
674 ctx_to
->screen
->cur_ctx
= ctx_to
;
677 static struct nvc0_state_validate
678 validate_list_3d
[] = {
679 { nvc0_validate_fb
, NVC0_NEW_3D_FRAMEBUFFER
},
680 { nvc0_validate_blend
, NVC0_NEW_3D_BLEND
},
681 { nvc0_validate_zsa
, NVC0_NEW_3D_ZSA
},
682 { nvc0_validate_sample_mask
, NVC0_NEW_3D_SAMPLE_MASK
},
683 { nvc0_validate_rasterizer
, NVC0_NEW_3D_RASTERIZER
},
684 { nvc0_validate_blend_colour
, NVC0_NEW_3D_BLEND_COLOUR
},
685 { nvc0_validate_stencil_ref
, NVC0_NEW_3D_STENCIL_REF
},
686 { nvc0_validate_stipple
, NVC0_NEW_3D_STIPPLE
},
687 { nvc0_validate_scissor
, NVC0_NEW_3D_SCISSOR
| NVC0_NEW_3D_RASTERIZER
},
688 { nvc0_validate_viewport
, NVC0_NEW_3D_VIEWPORT
},
689 { nvc0_vertprog_validate
, NVC0_NEW_3D_VERTPROG
},
690 { nvc0_tctlprog_validate
, NVC0_NEW_3D_TCTLPROG
},
691 { nvc0_tevlprog_validate
, NVC0_NEW_3D_TEVLPROG
},
692 { nvc0_validate_tess_state
, NVC0_NEW_3D_TESSFACTOR
},
693 { nvc0_gmtyprog_validate
, NVC0_NEW_3D_GMTYPROG
},
694 { nvc0_fragprog_validate
, NVC0_NEW_3D_FRAGPROG
| NVC0_NEW_3D_RASTERIZER
},
695 { nvc0_validate_derived_1
, NVC0_NEW_3D_FRAGPROG
| NVC0_NEW_3D_ZSA
|
696 NVC0_NEW_3D_RASTERIZER
},
697 { nvc0_validate_derived_2
, NVC0_NEW_3D_ZSA
| NVC0_NEW_3D_FRAMEBUFFER
},
698 { nvc0_validate_derived_3
, NVC0_NEW_3D_BLEND
| NVC0_NEW_3D_FRAMEBUFFER
},
699 { nvc0_validate_clip
, NVC0_NEW_3D_CLIP
| NVC0_NEW_3D_RASTERIZER
|
700 NVC0_NEW_3D_VERTPROG
|
701 NVC0_NEW_3D_TEVLPROG
|
702 NVC0_NEW_3D_GMTYPROG
},
703 { nvc0_constbufs_validate
, NVC0_NEW_3D_CONSTBUF
},
704 { nvc0_validate_textures
, NVC0_NEW_3D_TEXTURES
},
705 { nvc0_validate_samplers
, NVC0_NEW_3D_SAMPLERS
},
706 { nve4_set_tex_handles
, NVC0_NEW_3D_TEXTURES
| NVC0_NEW_3D_SAMPLERS
},
707 { nvc0_vertex_arrays_validate
, NVC0_NEW_3D_VERTEX
| NVC0_NEW_3D_ARRAYS
},
708 { nvc0_validate_surfaces
, NVC0_NEW_3D_SURFACES
},
709 { nvc0_validate_buffers
, NVC0_NEW_3D_BUFFERS
},
710 { nvc0_idxbuf_validate
, NVC0_NEW_3D_IDXBUF
},
711 { nvc0_tfb_validate
, NVC0_NEW_3D_TFB_TARGETS
| NVC0_NEW_3D_GMTYPROG
},
712 { nvc0_validate_min_samples
, NVC0_NEW_3D_MIN_SAMPLES
},
713 { nvc0_validate_driverconst
, NVC0_NEW_3D_DRIVERCONST
},
717 nvc0_state_validate(struct nvc0_context
*nvc0
, uint32_t mask
,
718 struct nvc0_state_validate
*validate_list
, int size
,
719 uint32_t *dirty
, struct nouveau_bufctx
*bufctx
)
725 if (nvc0
->screen
->cur_ctx
!= nvc0
)
726 nvc0_switch_pipe_context(nvc0
);
728 state_mask
= *dirty
& mask
;
731 for (i
= 0; i
< size
; ++i
) {
732 struct nvc0_state_validate
*validate
= &validate_list
[i
];
734 if (state_mask
& validate
->states
)
735 validate
->func(nvc0
);
737 *dirty
&= ~state_mask
;
739 nvc0_bufctx_fence(nvc0
, bufctx
, false);
742 nouveau_pushbuf_bufctx(nvc0
->base
.pushbuf
, bufctx
);
743 ret
= nouveau_pushbuf_validate(nvc0
->base
.pushbuf
);
749 nvc0_state_validate_3d(struct nvc0_context
*nvc0
, uint32_t mask
)
753 ret
= nvc0_state_validate(nvc0
, mask
, validate_list_3d
,
754 ARRAY_SIZE(validate_list_3d
), &nvc0
->dirty_3d
,
757 if (unlikely(nvc0
->state
.flushed
)) {
758 nvc0
->state
.flushed
= false;
759 nvc0_bufctx_fence(nvc0
, nvc0
->bufctx_3d
, true);