nv50,nvc0: handle user constbufs without wrapping them in a resource
[mesa.git] / src / gallium / drivers / nvc0 / nvc0_state.c
1 /*
2 * Copyright 2010 Christoph Bumiller
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
18 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
19 * OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 * SOFTWARE.
21 */
22
23 #include "pipe/p_defines.h"
24 #include "util/u_inlines.h"
25 #include "util/u_transfer.h"
26
27 #include "tgsi/tgsi_parse.h"
28
29 #include "nvc0_stateobj.h"
30 #include "nvc0_context.h"
31
32 #include "nvc0_3d.xml.h"
33 #include "nv50/nv50_texture.xml.h"
34
35 #include "nouveau/nouveau_gldefs.h"
36
37 static INLINE uint32_t
38 nvc0_colormask(unsigned mask)
39 {
40 uint32_t ret = 0;
41
42 if (mask & PIPE_MASK_R)
43 ret |= 0x0001;
44 if (mask & PIPE_MASK_G)
45 ret |= 0x0010;
46 if (mask & PIPE_MASK_B)
47 ret |= 0x0100;
48 if (mask & PIPE_MASK_A)
49 ret |= 0x1000;
50
51 return ret;
52 }
53
54 #define NVC0_BLEND_FACTOR_CASE(a, b) \
55 case PIPE_BLENDFACTOR_##a: return NV50_3D_BLEND_FACTOR_##b
56
57 static INLINE uint32_t
58 nvc0_blend_fac(unsigned factor)
59 {
60 switch (factor) {
61 NVC0_BLEND_FACTOR_CASE(ONE, ONE);
62 NVC0_BLEND_FACTOR_CASE(SRC_COLOR, SRC_COLOR);
63 NVC0_BLEND_FACTOR_CASE(SRC_ALPHA, SRC_ALPHA);
64 NVC0_BLEND_FACTOR_CASE(DST_ALPHA, DST_ALPHA);
65 NVC0_BLEND_FACTOR_CASE(DST_COLOR, DST_COLOR);
66 NVC0_BLEND_FACTOR_CASE(SRC_ALPHA_SATURATE, SRC_ALPHA_SATURATE);
67 NVC0_BLEND_FACTOR_CASE(CONST_COLOR, CONSTANT_COLOR);
68 NVC0_BLEND_FACTOR_CASE(CONST_ALPHA, CONSTANT_ALPHA);
69 NVC0_BLEND_FACTOR_CASE(SRC1_COLOR, SRC1_COLOR);
70 NVC0_BLEND_FACTOR_CASE(SRC1_ALPHA, SRC1_ALPHA);
71 NVC0_BLEND_FACTOR_CASE(ZERO, ZERO);
72 NVC0_BLEND_FACTOR_CASE(INV_SRC_COLOR, ONE_MINUS_SRC_COLOR);
73 NVC0_BLEND_FACTOR_CASE(INV_SRC_ALPHA, ONE_MINUS_SRC_ALPHA);
74 NVC0_BLEND_FACTOR_CASE(INV_DST_ALPHA, ONE_MINUS_DST_ALPHA);
75 NVC0_BLEND_FACTOR_CASE(INV_DST_COLOR, ONE_MINUS_DST_COLOR);
76 NVC0_BLEND_FACTOR_CASE(INV_CONST_COLOR, ONE_MINUS_CONSTANT_COLOR);
77 NVC0_BLEND_FACTOR_CASE(INV_CONST_ALPHA, ONE_MINUS_CONSTANT_ALPHA);
78 NVC0_BLEND_FACTOR_CASE(INV_SRC1_COLOR, ONE_MINUS_SRC1_COLOR);
79 NVC0_BLEND_FACTOR_CASE(INV_SRC1_ALPHA, ONE_MINUS_SRC1_ALPHA);
80 default:
81 return NV50_3D_BLEND_FACTOR_ZERO;
82 }
83 }
84
85 static void *
86 nvc0_blend_state_create(struct pipe_context *pipe,
87 const struct pipe_blend_state *cso)
88 {
89 struct nvc0_blend_stateobj *so = CALLOC_STRUCT(nvc0_blend_stateobj);
90 int i;
91 uint32_t ms;
92
93 so->pipe = *cso;
94
95 SB_IMMED_3D(so, BLEND_INDEPENDENT, cso->independent_blend_enable);
96
97 if (!cso->logicop_enable)
98 SB_IMMED_3D(so, LOGIC_OP_ENABLE, 0);
99
100 if (cso->logicop_enable) {
101 SB_BEGIN_3D(so, LOGIC_OP_ENABLE, 2);
102 SB_DATA (so, 1);
103 SB_DATA (so, nvgl_logicop_func(cso->logicop_func));
104
105 SB_IMMED_3D(so, MACRO_BLEND_ENABLES, 0);
106 } else
107 if (!cso->independent_blend_enable) {
108 SB_IMMED_3D(so,
109 MACRO_BLEND_ENABLES, cso->rt[0].blend_enable ? 0xff : 0);
110
111 if (cso->rt[0].blend_enable) {
112 SB_BEGIN_3D(so, BLEND_EQUATION_RGB, 5);
113 SB_DATA (so, nvgl_blend_eqn(cso->rt[0].rgb_func));
114 SB_DATA (so, nvc0_blend_fac(cso->rt[0].rgb_src_factor));
115 SB_DATA (so, nvc0_blend_fac(cso->rt[0].rgb_dst_factor));
116 SB_DATA (so, nvgl_blend_eqn(cso->rt[0].alpha_func));
117 SB_DATA (so, nvc0_blend_fac(cso->rt[0].alpha_src_factor));
118 SB_BEGIN_3D(so, BLEND_FUNC_DST_ALPHA, 1);
119 SB_DATA (so, nvc0_blend_fac(cso->rt[0].alpha_dst_factor));
120 }
121
122 SB_IMMED_3D(so, COLOR_MASK_COMMON, 1);
123 SB_BEGIN_3D(so, COLOR_MASK(0), 1);
124 SB_DATA (so, nvc0_colormask(cso->rt[0].colormask));
125 } else {
126 uint8_t en = 0;
127
128 for (i = 0; i < 8; ++i) {
129 if (!cso->rt[i].blend_enable)
130 continue;
131 en |= 1 << i;
132
133 SB_BEGIN_3D(so, IBLEND_EQUATION_RGB(i), 6);
134 SB_DATA (so, nvgl_blend_eqn(cso->rt[i].rgb_func));
135 SB_DATA (so, nvc0_blend_fac(cso->rt[i].rgb_src_factor));
136 SB_DATA (so, nvc0_blend_fac(cso->rt[i].rgb_dst_factor));
137 SB_DATA (so, nvgl_blend_eqn(cso->rt[i].alpha_func));
138 SB_DATA (so, nvc0_blend_fac(cso->rt[i].alpha_src_factor));
139 SB_DATA (so, nvc0_blend_fac(cso->rt[i].alpha_dst_factor));
140 }
141 SB_IMMED_3D(so, MACRO_BLEND_ENABLES, en);
142
143 SB_IMMED_3D(so, COLOR_MASK_COMMON, 0);
144 SB_BEGIN_3D(so, COLOR_MASK(0), 8);
145 for (i = 0; i < 8; ++i)
146 SB_DATA(so, nvc0_colormask(cso->rt[i].colormask));
147 }
148
149 ms = 0;
150 if (cso->alpha_to_coverage)
151 ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_COVERAGE;
152 if (cso->alpha_to_one)
153 ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_ONE;
154
155 SB_BEGIN_3D(so, MULTISAMPLE_CTRL, 1);
156 SB_DATA (so, ms);
157
158 assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
159 return so;
160 }
161
162 static void
163 nvc0_blend_state_bind(struct pipe_context *pipe, void *hwcso)
164 {
165 struct nvc0_context *nvc0 = nvc0_context(pipe);
166
167 nvc0->blend = hwcso;
168 nvc0->dirty |= NVC0_NEW_BLEND;
169 }
170
/* Destroy a blend CSO; it holds no references, so a plain free suffices. */
static void
nvc0_blend_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}
176
/* NOTE: ignoring line_last_pixel, using FALSE (set on screen init) */
/* Create a rasterizer CSO: pre-encode the 3D-class commands for this
 * rasterizer state into so->state for replay at validation time.
 */
static void *
nvc0_rasterizer_state_create(struct pipe_context *pipe,
                             const struct pipe_rasterizer_state *cso)
{
   struct nvc0_rasterizer_stateobj *so;
   uint32_t reg;

   so = CALLOC_STRUCT(nvc0_rasterizer_stateobj);
   if (!so)
      return NULL;
   so->pipe = *cso;

   /* Scissor enables are handled in scissor state, we will not want to
    * always emit 16 commands, one for each scissor rectangle, here.
    */

   SB_BEGIN_3D(so, SHADE_MODEL, 1);
   SB_DATA    (so, cso->flatshade ? NVC0_3D_SHADE_MODEL_FLAT :
                                    NVC0_3D_SHADE_MODEL_SMOOTH);
   SB_IMMED_3D(so, PROVOKING_VERTEX_LAST, !cso->flatshade_first);
   SB_IMMED_3D(so, VERTEX_TWO_SIDE_ENABLE, cso->light_twoside);

   SB_IMMED_3D(so, VERT_COLOR_CLAMP_EN, cso->clamp_vertex_color);
   SB_BEGIN_3D(so, FRAG_COLOR_CLAMP_EN, 1);
   /* one enable nibble per colour channel/target */
   SB_DATA    (so, cso->clamp_fragment_color ? 0x11111111 : 0x00000000);

   SB_IMMED_3D(so, MULTISAMPLE_ENABLE, cso->multisample);

   /* line width goes to a different register depending on smoothing */
   SB_IMMED_3D(so, LINE_SMOOTH_ENABLE, cso->line_smooth);
   if (cso->line_smooth)
      SB_BEGIN_3D(so, LINE_WIDTH_SMOOTH, 1);
   else
      SB_BEGIN_3D(so, LINE_WIDTH_ALIASED, 1);
   SB_DATA    (so, fui(cso->line_width));

   SB_IMMED_3D(so, LINE_STIPPLE_ENABLE, cso->line_stipple_enable);
   if (cso->line_stipple_enable) {
      SB_BEGIN_3D(so, LINE_STIPPLE_PATTERN, 1);
      SB_DATA    (so, (cso->line_stipple_pattern << 8) |
                  cso->line_stipple_factor);

   }

   SB_IMMED_3D(so, VP_POINT_SIZE_EN, cso->point_size_per_vertex);
   if (!cso->point_size_per_vertex) {
      SB_BEGIN_3D(so, POINT_SIZE, 1);
      SB_DATA    (so, fui(cso->point_size));
   }

   /* point sprite texcoord origin + per-coord replace enables */
   reg = (cso->sprite_coord_mode == PIPE_SPRITE_COORD_UPPER_LEFT) ?
      NVC0_3D_POINT_COORD_REPLACE_COORD_ORIGIN_UPPER_LEFT :
      NVC0_3D_POINT_COORD_REPLACE_COORD_ORIGIN_LOWER_LEFT;

   SB_BEGIN_3D(so, POINT_COORD_REPLACE, 1);
   SB_DATA    (so, ((cso->sprite_coord_enable & 0xff) << 3) | reg);
   SB_IMMED_3D(so, POINT_SPRITE_ENABLE, cso->point_quad_rasterization);
   SB_IMMED_3D(so, POINT_SMOOTH_ENABLE, cso->point_smooth);

   SB_BEGIN_3D(so, MACRO_POLYGON_MODE_FRONT, 1);
   SB_DATA    (so, nvgl_polygon_mode(cso->fill_front));
   SB_BEGIN_3D(so, MACRO_POLYGON_MODE_BACK, 1);
   SB_DATA    (so, nvgl_polygon_mode(cso->fill_back));
   SB_IMMED_3D(so, POLYGON_SMOOTH_ENABLE, cso->poly_smooth);

   SB_BEGIN_3D(so, CULL_FACE_ENABLE, 3);
   SB_DATA    (so, cso->cull_face != PIPE_FACE_NONE);
   SB_DATA    (so, cso->front_ccw ? NVC0_3D_FRONT_FACE_CCW :
                                    NVC0_3D_FRONT_FACE_CW);
   switch (cso->cull_face) {
   case PIPE_FACE_FRONT_AND_BACK:
      SB_DATA(so, NVC0_3D_CULL_FACE_FRONT_AND_BACK);
      break;
   case PIPE_FACE_FRONT:
      SB_DATA(so, NVC0_3D_CULL_FACE_FRONT);
      break;
   case PIPE_FACE_BACK:
   default:
      SB_DATA(so, NVC0_3D_CULL_FACE_BACK);
      break;
   }

   SB_IMMED_3D(so, POLYGON_STIPPLE_ENABLE, cso->poly_stipple_enable);
   SB_BEGIN_3D(so, POLYGON_OFFSET_POINT_ENABLE, 3);
   SB_DATA    (so, cso->offset_point);
   SB_DATA    (so, cso->offset_line);
   SB_DATA    (so, cso->offset_tri);

   if (cso->offset_point || cso->offset_line || cso->offset_tri) {
      SB_BEGIN_3D(so, POLYGON_OFFSET_FACTOR, 1);
      SB_DATA    (so, fui(cso->offset_scale));
      SB_BEGIN_3D(so, POLYGON_OFFSET_UNITS, 1);
      /* units are doubled here — presumably to match the HW's unit scale;
       * TODO confirm against the class documentation */
      SB_DATA    (so, fui(cso->offset_units * 2.0f));
      SB_BEGIN_3D(so, POLYGON_OFFSET_CLAMP, 1);
      SB_DATA    (so, fui(cso->offset_clamp));
   }

   /* depth clip vs depth clamp selection */
   if (cso->depth_clip)
      reg = NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1;
   else
      reg =
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1 |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_NEAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_FAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK12_UNK2;

   SB_BEGIN_3D(so, VIEW_VOLUME_CLIP_CTRL, 1);
   SB_DATA    (so, reg);

   assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
   return (void *)so;
}
289
290 static void
291 nvc0_rasterizer_state_bind(struct pipe_context *pipe, void *hwcso)
292 {
293 struct nvc0_context *nvc0 = nvc0_context(pipe);
294
295 nvc0->rast = hwcso;
296 nvc0->dirty |= NVC0_NEW_RASTERIZER;
297 }
298
/* Destroy a rasterizer CSO; no references held, plain free. */
static void
nvc0_rasterizer_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}
304
305 static void *
306 nvc0_zsa_state_create(struct pipe_context *pipe,
307 const struct pipe_depth_stencil_alpha_state *cso)
308 {
309 struct nvc0_zsa_stateobj *so = CALLOC_STRUCT(nvc0_zsa_stateobj);
310
311 so->pipe = *cso;
312
313 SB_IMMED_3D(so, DEPTH_TEST_ENABLE, cso->depth.enabled);
314 if (cso->depth.enabled) {
315 SB_IMMED_3D(so, DEPTH_WRITE_ENABLE, cso->depth.writemask);
316 SB_BEGIN_3D(so, DEPTH_TEST_FUNC, 1);
317 SB_DATA (so, nvgl_comparison_op(cso->depth.func));
318 }
319
320 if (cso->stencil[0].enabled) {
321 SB_BEGIN_3D(so, STENCIL_ENABLE, 5);
322 SB_DATA (so, 1);
323 SB_DATA (so, nvgl_stencil_op(cso->stencil[0].fail_op));
324 SB_DATA (so, nvgl_stencil_op(cso->stencil[0].zfail_op));
325 SB_DATA (so, nvgl_stencil_op(cso->stencil[0].zpass_op));
326 SB_DATA (so, nvgl_comparison_op(cso->stencil[0].func));
327 SB_BEGIN_3D(so, STENCIL_FRONT_FUNC_MASK, 2);
328 SB_DATA (so, cso->stencil[0].valuemask);
329 SB_DATA (so, cso->stencil[0].writemask);
330 } else {
331 SB_IMMED_3D(so, STENCIL_ENABLE, 0);
332 }
333
334 if (cso->stencil[1].enabled) {
335 assert(cso->stencil[0].enabled);
336 SB_BEGIN_3D(so, STENCIL_TWO_SIDE_ENABLE, 5);
337 SB_DATA (so, 1);
338 SB_DATA (so, nvgl_stencil_op(cso->stencil[1].fail_op));
339 SB_DATA (so, nvgl_stencil_op(cso->stencil[1].zfail_op));
340 SB_DATA (so, nvgl_stencil_op(cso->stencil[1].zpass_op));
341 SB_DATA (so, nvgl_comparison_op(cso->stencil[1].func));
342 SB_BEGIN_3D(so, STENCIL_BACK_MASK, 2);
343 SB_DATA (so, cso->stencil[1].writemask);
344 SB_DATA (so, cso->stencil[1].valuemask);
345 } else
346 if (cso->stencil[0].enabled) {
347 SB_IMMED_3D(so, STENCIL_TWO_SIDE_ENABLE, 0);
348 }
349
350 SB_IMMED_3D(so, ALPHA_TEST_ENABLE, cso->alpha.enabled);
351 if (cso->alpha.enabled) {
352 SB_BEGIN_3D(so, ALPHA_TEST_REF, 2);
353 SB_DATA (so, fui(cso->alpha.ref_value));
354 SB_DATA (so, nvgl_comparison_op(cso->alpha.func));
355 }
356
357 assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
358 return (void *)so;
359 }
360
361 static void
362 nvc0_zsa_state_bind(struct pipe_context *pipe, void *hwcso)
363 {
364 struct nvc0_context *nvc0 = nvc0_context(pipe);
365
366 nvc0->zsa = hwcso;
367 nvc0->dirty |= NVC0_NEW_ZSA;
368 }
369
/* Destroy a depth/stencil/alpha CSO; no references held, plain free. */
static void
nvc0_zsa_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}
375
376 /* ====================== SAMPLERS AND TEXTURES ================================
377 */
378
379 #define NV50_TSC_WRAP_CASE(n) \
380 case PIPE_TEX_WRAP_##n: return NV50_TSC_WRAP_##n
381
382 static INLINE unsigned
383 nv50_tsc_wrap_mode(unsigned wrap)
384 {
385 switch (wrap) {
386 NV50_TSC_WRAP_CASE(REPEAT);
387 NV50_TSC_WRAP_CASE(MIRROR_REPEAT);
388 NV50_TSC_WRAP_CASE(CLAMP_TO_EDGE);
389 NV50_TSC_WRAP_CASE(CLAMP_TO_BORDER);
390 NV50_TSC_WRAP_CASE(CLAMP);
391 NV50_TSC_WRAP_CASE(MIRROR_CLAMP_TO_EDGE);
392 NV50_TSC_WRAP_CASE(MIRROR_CLAMP_TO_BORDER);
393 NV50_TSC_WRAP_CASE(MIRROR_CLAMP);
394 default:
395 NOUVEAU_ERR("unknown wrap mode: %d\n", wrap);
396 return NV50_TSC_WRAP_REPEAT;
397 }
398 }
399
400 static void
401 nvc0_sampler_state_delete(struct pipe_context *pipe, void *hwcso)
402 {
403 unsigned s, i;
404
405 for (s = 0; s < 5; ++s)
406 for (i = 0; i < nvc0_context(pipe)->num_samplers[s]; ++i)
407 if (nvc0_context(pipe)->samplers[s][i] == hwcso)
408 nvc0_context(pipe)->samplers[s][i] = NULL;
409
410 nvc0_screen_tsc_free(nvc0_context(pipe)->screen, nv50_tsc_entry(hwcso));
411
412 FREE(hwcso);
413 }
414
/* Bind 'nr' sampler CSOs for shader stage 's', unlocking the TSC entries
 * of samplers that are replaced or unbound, and marking changed slots in
 * samplers_dirty.
 */
static INLINE void
nvc0_stage_sampler_states_bind(struct nvc0_context *nvc0, int s,
                               unsigned nr, void **hwcso)
{
   unsigned i;

   for (i = 0; i < nr; ++i) {
      struct nv50_tsc_entry *old = nvc0->samplers[s][i];

      if (hwcso[i] == old)
         continue;
      nvc0->samplers_dirty[s] |= 1 << i;

      nvc0->samplers[s][i] = nv50_tsc_entry(hwcso[i]);
      if (old)
         nvc0_screen_tsc_unlock(nvc0->screen, old);
   }
   /* Unbind trailing slots if fewer samplers are bound than before.
    * NOTE(review): these slots are not flagged in samplers_dirty here —
    * presumably validation keys off num_samplers; confirm.
    */
   for (; i < nvc0->num_samplers[s]; ++i) {
      if (nvc0->samplers[s][i]) {
         nvc0_screen_tsc_unlock(nvc0->screen, nvc0->samplers[s][i]);
         nvc0->samplers[s][i] = NULL;
      }
   }

   nvc0->num_samplers[s] = nr;

   nvc0->dirty |= NVC0_NEW_SAMPLERS;
}
443
/* Vertex-shader samplers live in stage slot 0. */
static void
nvc0_vp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 0, nr, s);
}
449
/* Fragment-shader samplers live in stage slot 4. */
static void
nvc0_fp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 4, nr, s);
}
455
/* Geometry-shader samplers live in stage slot 3. */
static void
nvc0_gp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 3, nr, s);
}
461
/* NOTE: only called when not referenced anywhere, won't be bound */
static void
nvc0_sampler_view_destroy(struct pipe_context *pipe,
                          struct pipe_sampler_view *view)
{
   /* drop the reference on the underlying texture resource */
   pipe_resource_reference(&view->texture, NULL);

   /* release the view's TIC slot in the screen-wide table */
   nvc0_screen_tic_free(nvc0_context(pipe)->screen, nv50_tic_entry(view));

   FREE(nv50_tic_entry(view));
}
473
/* Bind 'nr' sampler views for shader stage 's'.  Replaced or unbound
 * views have their TIC entries unlocked and their texture binding removed
 * from the 3D bufctx; changed slots are marked in textures_dirty.
 */
static INLINE void
nvc0_stage_set_sampler_views(struct nvc0_context *nvc0, int s,
                             unsigned nr,
                             struct pipe_sampler_view **views)
{
   unsigned i;

   for (i = 0; i < nr; ++i) {
      struct nv50_tic_entry *old = nv50_tic_entry(nvc0->textures[s][i]);

      if (views[i] == nvc0->textures[s][i])
         continue;
      nvc0->textures_dirty[s] |= 1 << i;

      if (old) {
         /* remove the old texture from the validation buffer list */
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_TEX(s, i));
         nvc0_screen_tic_unlock(nvc0->screen, old);
      }

      pipe_sampler_view_reference(&nvc0->textures[s][i], views[i]);
   }

   /* unbind trailing slots if fewer views are bound than before */
   for (i = nr; i < nvc0->num_textures[s]; ++i) {
      struct nv50_tic_entry *old = nv50_tic_entry(nvc0->textures[s][i]);
      if (old) {
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_TEX(s, i));
         nvc0_screen_tic_unlock(nvc0->screen, old);
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);
      }
   }

   nvc0->num_textures[s] = nr;

   nvc0->dirty |= NVC0_NEW_TEXTURES;
}
509
/* Vertex-shader sampler views live in stage slot 0. */
static void
nvc0_vp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 0, nr, views);
}
517
/* Fragment-shader sampler views live in stage slot 4. */
static void
nvc0_fp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 4, nr, views);
}
525
/* Geometry-shader sampler views live in stage slot 3. */
static void
nvc0_gp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 3, nr, views);
}
533
534 /* ============================= SHADERS =======================================
535 */
536
537 static void *
538 nvc0_sp_state_create(struct pipe_context *pipe,
539 const struct pipe_shader_state *cso, unsigned type)
540 {
541 struct nvc0_program *prog;
542
543 prog = CALLOC_STRUCT(nvc0_program);
544 if (!prog)
545 return NULL;
546
547 prog->type = type;
548
549 if (cso->tokens)
550 prog->pipe.tokens = tgsi_dup_tokens(cso->tokens);
551
552 if (cso->stream_output.num_outputs)
553 prog->pipe.stream_output = cso->stream_output;
554
555 return (void *)prog;
556 }
557
/* Shared delete path for VS/FS/GS: destroy compiled code/resources, then
 * free the token copy made at create time and the object itself.
 */
static void
nvc0_sp_state_delete(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_program *prog = (struct nvc0_program *)hwcso;

   nvc0_program_destroy(nvc0_context(pipe), prog);

   FREE((void *)prog->pipe.tokens);
   FREE(prog);
}
568
/* Create a vertex program state object. */
static void *
nvc0_vp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_VERTEX);
}
575
576 static void
577 nvc0_vp_state_bind(struct pipe_context *pipe, void *hwcso)
578 {
579 struct nvc0_context *nvc0 = nvc0_context(pipe);
580
581 nvc0->vertprog = hwcso;
582 nvc0->dirty |= NVC0_NEW_VERTPROG;
583 }
584
/* Create a fragment program state object. */
static void *
nvc0_fp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_FRAGMENT);
}
591
592 static void
593 nvc0_fp_state_bind(struct pipe_context *pipe, void *hwcso)
594 {
595 struct nvc0_context *nvc0 = nvc0_context(pipe);
596
597 nvc0->fragprog = hwcso;
598 nvc0->dirty |= NVC0_NEW_FRAGPROG;
599 }
600
/* Create a geometry program state object. */
static void *
nvc0_gp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_GEOMETRY);
}
607
608 static void
609 nvc0_gp_state_bind(struct pipe_context *pipe, void *hwcso)
610 {
611 struct nvc0_context *nvc0 = nvc0_context(pipe);
612
613 nvc0->gmtyprog = hwcso;
614 nvc0->dirty |= NVC0_NEW_GMTYPROG;
615 }
616
/* Bind constant buffer 'index' for 'shader'.  A buffer may be supplied
 * either as a real pipe_resource (cb->buffer) or as a raw user pointer
 * (cb->user_buffer); user buffers are tracked as a bare data pointer
 * without being wrapped in a resource.  u.buf and u.data are members of
 * the same union, discriminated by the 'user' flag.
 */
static void
nvc0_set_constant_buffer(struct pipe_context *pipe, uint shader, uint index,
                         struct pipe_constant_buffer *cb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct pipe_resource *res = cb ? cb->buffer : NULL;
   const unsigned s = nvc0_shader_stage(shader);
   const unsigned i = index;

   /* compute constbufs are not handled here */
   if (shader == PIPE_SHADER_COMPUTE)
      return;

   /* If the slot previously held a user pointer, u.buf is not a real
    * resource reference and must only be cleared, not unreferenced. */
   if (nvc0->constbuf[s][i].user)
      nvc0->constbuf[s][i].u.buf = NULL;
   else
   if (nvc0->constbuf[s][i].u.buf)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_CB(s, i));

   pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, res);

   nvc0->constbuf[s][i].user = (cb && cb->user_buffer) ? TRUE : FALSE;
   if (nvc0->constbuf[s][i].user) {
      /* NOTE(review): writing u.data overwrites u.buf; assumes cb->buffer
       * and cb->user_buffer are never both set (otherwise the reference
       * taken above would leak) — confirm against the state tracker. */
      nvc0->constbuf[s][i].u.data = cb->user_buffer;
      nvc0->constbuf[s][i].size = cb->buffer_size;
   } else
   if (cb) {
      nvc0->constbuf[s][i].offset = cb->buffer_offset;
      /* size rounded up to 0x100 — presumably a HW alignment requirement
       * for constbuf sizes; TODO confirm */
      nvc0->constbuf[s][i].size = align(cb->buffer_size, 0x100);
   }

   nvc0->constbuf_dirty[s] |= 1 << i;

   nvc0->dirty |= NVC0_NEW_CONSTBUF;
}
651
652 /* =============================================================================
653 */
654
655 static void
656 nvc0_set_blend_color(struct pipe_context *pipe,
657 const struct pipe_blend_color *bcol)
658 {
659 struct nvc0_context *nvc0 = nvc0_context(pipe);
660
661 nvc0->blend_colour = *bcol;
662 nvc0->dirty |= NVC0_NEW_BLEND_COLOUR;
663 }
664
665 static void
666 nvc0_set_stencil_ref(struct pipe_context *pipe,
667 const struct pipe_stencil_ref *sr)
668 {
669 struct nvc0_context *nvc0 = nvc0_context(pipe);
670
671 nvc0->stencil_ref = *sr;
672 nvc0->dirty |= NVC0_NEW_STENCIL_REF;
673 }
674
675 static void
676 nvc0_set_clip_state(struct pipe_context *pipe,
677 const struct pipe_clip_state *clip)
678 {
679 struct nvc0_context *nvc0 = nvc0_context(pipe);
680
681 memcpy(nvc0->clip.ucp, clip->ucp, sizeof(clip->ucp));
682
683 nvc0->dirty |= NVC0_NEW_CLIP;
684 }
685
686 static void
687 nvc0_set_sample_mask(struct pipe_context *pipe, unsigned sample_mask)
688 {
689 struct nvc0_context *nvc0 = nvc0_context(pipe);
690
691 nvc0->sample_mask = sample_mask;
692 nvc0->dirty |= NVC0_NEW_SAMPLE_MASK;
693 }
694
695
/* Bind the framebuffer: take references on the new surfaces, drop any
 * surfaces beyond the new colour-buffer count, and invalidate the FB
 * entries in the 3D bufctx so validation re-adds the new buffers.
 */
static void
nvc0_set_framebuffer_state(struct pipe_context *pipe,
                           const struct pipe_framebuffer_state *fb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   unsigned i;

   nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_FB);

   for (i = 0; i < fb->nr_cbufs; ++i)
      pipe_surface_reference(&nvc0->framebuffer.cbufs[i], fb->cbufs[i]);
   /* release colour buffers no longer in use */
   for (; i < nvc0->framebuffer.nr_cbufs; ++i)
      pipe_surface_reference(&nvc0->framebuffer.cbufs[i], NULL);

   nvc0->framebuffer.nr_cbufs = fb->nr_cbufs;

   nvc0->framebuffer.width = fb->width;
   nvc0->framebuffer.height = fb->height;

   pipe_surface_reference(&nvc0->framebuffer.zsbuf, fb->zsbuf);

   nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
}
719
720 static void
721 nvc0_set_polygon_stipple(struct pipe_context *pipe,
722 const struct pipe_poly_stipple *stipple)
723 {
724 struct nvc0_context *nvc0 = nvc0_context(pipe);
725
726 nvc0->stipple = *stipple;
727 nvc0->dirty |= NVC0_NEW_STIPPLE;
728 }
729
730 static void
731 nvc0_set_scissor_state(struct pipe_context *pipe,
732 const struct pipe_scissor_state *scissor)
733 {
734 struct nvc0_context *nvc0 = nvc0_context(pipe);
735
736 nvc0->scissor = *scissor;
737 nvc0->dirty |= NVC0_NEW_SCISSOR;
738 }
739
740 static void
741 nvc0_set_viewport_state(struct pipe_context *pipe,
742 const struct pipe_viewport_state *vpt)
743 {
744 struct nvc0_context *nvc0 = nvc0_context(pipe);
745
746 nvc0->viewport = *vpt;
747 nvc0->dirty |= NVC0_NEW_VIEWPORT;
748 }
749
/* Bind 'count' vertex buffers.  Buffers with stride 0 are tracked in the
 * constant_vbos bitmask (their attributes are treated as constants rather
 * than arrays).  NVC0_NEW_ARRAYS is only set when something actually
 * changed; the bufctx VTX list is reset in that case so validation
 * re-adds the buffers.
 */
static void
nvc0_set_vertex_buffers(struct pipe_context *pipe,
                        unsigned count,
                        const struct pipe_vertex_buffer *vb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   uint32_t constant_vbos = 0;
   unsigned i;

   if (count != nvc0->num_vtxbufs) {
      /* buffer count changed: take all new buffers, drop the leftovers */
      for (i = 0; i < count; ++i) {
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, vb[i].buffer);
         nvc0->vtxbuf[i].buffer_offset = vb[i].buffer_offset;
         nvc0->vtxbuf[i].stride = vb[i].stride;
         if (!vb[i].stride)
            constant_vbos |= 1 << i;
      }
      for (; i < nvc0->num_vtxbufs; ++i)
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);

      nvc0->num_vtxbufs = count;
      nvc0->dirty |= NVC0_NEW_ARRAYS;
   } else {
      /* same count: only update (and dirty) slots that differ */
      for (i = 0; i < count; ++i) {
         if (nvc0->vtxbuf[i].buffer == vb[i].buffer &&
             nvc0->vtxbuf[i].buffer_offset == vb[i].buffer_offset &&
             nvc0->vtxbuf[i].stride == vb[i].stride)
            continue;
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, vb[i].buffer);
         nvc0->vtxbuf[i].buffer_offset = vb[i].buffer_offset;
         nvc0->vtxbuf[i].stride = vb[i].stride;
         if (likely(vb[i].stride))
            nvc0->dirty |= NVC0_NEW_ARRAYS;
         else
            constant_vbos |= 1 << i;
      }
   }
   /* a change in the set of stride-0 buffers also requires revalidation */
   if (constant_vbos != nvc0->constant_vbos) {
      nvc0->constant_vbos = constant_vbos;
      nvc0->dirty |= NVC0_NEW_ARRAYS;
   }

   if (nvc0->dirty & NVC0_NEW_ARRAYS)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_VTX);
}
795
/* Bind (or unbind, when ib/ib->buffer is NULL) the index buffer.  The
 * IDXBUF dirty bit doubles as "an index buffer is bound", so it is
 * cleared rather than set on unbind.
 */
static void
nvc0_set_index_buffer(struct pipe_context *pipe,
                      const struct pipe_index_buffer *ib)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   if (nvc0->idxbuf.buffer)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_IDX);

   if (ib && ib->buffer) {
      nvc0->dirty |= NVC0_NEW_IDXBUF;
      pipe_resource_reference(&nvc0->idxbuf.buffer, ib->buffer);
      nvc0->idxbuf.offset = ib->offset;
      nvc0->idxbuf.index_size = ib->index_size;
   } else {
      nvc0->dirty &= ~NVC0_NEW_IDXBUF;
      pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);
   }
}
815
816 static void
817 nvc0_vertex_state_bind(struct pipe_context *pipe, void *hwcso)
818 {
819 struct nvc0_context *nvc0 = nvc0_context(pipe);
820
821 nvc0->vertex = hwcso;
822 nvc0->dirty |= NVC0_NEW_VERTEX;
823 }
824
/* Create a stream-output (transform feedback) target.  A driver query is
 * allocated alongside it to read back the buffer offset the hardware
 * actually wrote.
 */
static struct pipe_stream_output_target *
nvc0_so_target_create(struct pipe_context *pipe,
                      struct pipe_resource *res,
                      unsigned offset, unsigned size)
{
   struct nvc0_so_target *targ = MALLOC_STRUCT(nvc0_so_target);
   if (!targ)
      return NULL;

   targ->pq = pipe->create_query(pipe, NVC0_QUERY_TFB_BUFFER_OFFSET);
   if (!targ->pq) {
      FREE(targ);
      return NULL;
   }
   /* no data has been streamed to this target yet */
   targ->clean = TRUE;

   targ->pipe.buffer_size = size;
   targ->pipe.buffer_offset = offset;
   targ->pipe.context = pipe;
   /* MALLOC_STRUCT does not zero; clear before taking the reference */
   targ->pipe.buffer = NULL;
   pipe_resource_reference(&targ->pipe.buffer, res);
   pipe_reference_init(&targ->pipe.reference, 1);

   return &targ->pipe;
}
850
851 static void
852 nvc0_so_target_destroy(struct pipe_context *pipe,
853 struct pipe_stream_output_target *ptarg)
854 {
855 struct nvc0_so_target *targ = nvc0_so_target(ptarg);
856 pipe->destroy_query(pipe, targ->pq);
857 FREE(targ);
858 }
859
/* Bind up to 4 transform-feedback targets.  Targets being replaced have
 * their current hardware offset saved (via their query object); targets
 * bound without the corresponding append_mask bit restart from offset 0
 * ('clean').
 */
static void
nvc0_set_transform_feedback_targets(struct pipe_context *pipe,
                                    unsigned num_targets,
                                    struct pipe_stream_output_target **targets,
                                    unsigned append_mask)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   unsigned i;
   boolean serialize = TRUE;

   assert(num_targets <= 4);

   for (i = 0; i < num_targets; ++i) {
      /* rebinding the same target in append mode is a no-op */
      if (nvc0->tfbbuf[i] == targets[i] && (append_mask & (1 << i)))
         continue;
      nvc0->tfbbuf_dirty |= 1 << i;

      /* save the offset of the target being replaced so it can resume */
      if (nvc0->tfbbuf[i] && nvc0->tfbbuf[i] != targets[i])
         nvc0_so_target_save_offset(pipe, nvc0->tfbbuf[i], i, &serialize);

      if (targets[i] && !(append_mask & (1 << i)))
         nvc0_so_target(targets[i])->clean = TRUE;

      pipe_so_target_reference(&nvc0->tfbbuf[i], targets[i]);
   }
   /* unbind trailing slots if fewer targets are bound than before */
   for (; i < nvc0->num_tfbbufs; ++i) {
      nvc0->tfbbuf_dirty |= 1 << i;
      nvc0_so_target_save_offset(pipe, nvc0->tfbbuf[i], i, &serialize);
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);
   }
   nvc0->num_tfbbufs = num_targets;

   if (nvc0->tfbbuf_dirty)
      nvc0->dirty |= NVC0_NEW_TFB_TARGETS;
}
895
/* Install all state-object and parameter-state entry points on the
 * context.  Sampler CSO creation is shared with the nv50 driver
 * (nv50_sampler_state_create); everything else is nvc0-specific.
 */
void
nvc0_init_state_functions(struct nvc0_context *nvc0)
{
   struct pipe_context *pipe = &nvc0->base.pipe;

   /* blend / rasterizer / depth-stencil-alpha CSOs */
   pipe->create_blend_state = nvc0_blend_state_create;
   pipe->bind_blend_state = nvc0_blend_state_bind;
   pipe->delete_blend_state = nvc0_blend_state_delete;

   pipe->create_rasterizer_state = nvc0_rasterizer_state_create;
   pipe->bind_rasterizer_state = nvc0_rasterizer_state_bind;
   pipe->delete_rasterizer_state = nvc0_rasterizer_state_delete;

   pipe->create_depth_stencil_alpha_state = nvc0_zsa_state_create;
   pipe->bind_depth_stencil_alpha_state = nvc0_zsa_state_bind;
   pipe->delete_depth_stencil_alpha_state = nvc0_zsa_state_delete;

   /* samplers and sampler views */
   pipe->create_sampler_state = nv50_sampler_state_create;
   pipe->delete_sampler_state = nvc0_sampler_state_delete;
   pipe->bind_vertex_sampler_states   = nvc0_vp_sampler_states_bind;
   pipe->bind_fragment_sampler_states = nvc0_fp_sampler_states_bind;
   pipe->bind_geometry_sampler_states = nvc0_gp_sampler_states_bind;

   pipe->create_sampler_view = nvc0_create_sampler_view;
   pipe->sampler_view_destroy = nvc0_sampler_view_destroy;
   pipe->set_vertex_sampler_views   = nvc0_vp_set_sampler_views;
   pipe->set_fragment_sampler_views = nvc0_fp_set_sampler_views;
   pipe->set_geometry_sampler_views = nvc0_gp_set_sampler_views;

   /* shader state objects */
   pipe->create_vs_state = nvc0_vp_state_create;
   pipe->create_fs_state = nvc0_fp_state_create;
   pipe->create_gs_state = nvc0_gp_state_create;
   pipe->bind_vs_state = nvc0_vp_state_bind;
   pipe->bind_fs_state = nvc0_fp_state_bind;
   pipe->bind_gs_state = nvc0_gp_state_bind;
   pipe->delete_vs_state = nvc0_sp_state_delete;
   pipe->delete_fs_state = nvc0_sp_state_delete;
   pipe->delete_gs_state = nvc0_sp_state_delete;

   /* parameter state */
   pipe->set_blend_color = nvc0_set_blend_color;
   pipe->set_stencil_ref = nvc0_set_stencil_ref;
   pipe->set_clip_state = nvc0_set_clip_state;
   pipe->set_sample_mask = nvc0_set_sample_mask;
   pipe->set_constant_buffer = nvc0_set_constant_buffer;
   pipe->set_framebuffer_state = nvc0_set_framebuffer_state;
   pipe->set_polygon_stipple = nvc0_set_polygon_stipple;
   pipe->set_scissor_state = nvc0_set_scissor_state;
   pipe->set_viewport_state = nvc0_set_viewport_state;

   /* vertex arrays and stream output */
   pipe->create_vertex_elements_state = nvc0_vertex_state_create;
   pipe->delete_vertex_elements_state = nvc0_vertex_state_delete;
   pipe->bind_vertex_elements_state = nvc0_vertex_state_bind;

   pipe->set_vertex_buffers = nvc0_set_vertex_buffers;
   pipe->set_index_buffer = nvc0_set_index_buffer;

   pipe->create_stream_output_target = nvc0_so_target_create;
   pipe->stream_output_target_destroy = nvc0_so_target_destroy;
   pipe->set_stream_output_targets = nvc0_set_transform_feedback_targets;
}
956