2 #include "nvc0_context.h"
3 #include "os/os_time.h"
/*
 * Set up the hardware ZCULL (hierarchical depth-culling) region backing the
 * currently bound depth/stencil surface, and emit the setup methods.
 *
 * NOTE(review): this chunk is an extraction-mangled fragment; several
 * original lines (function braces, the `size` declaration, the width
 * rounding branch, OUT_RING fills after some BEGIN_RINGs) are missing
 * between the numbered statements below.
 */
6 nvc0_validate_zcull(struct nvc0_context
*nvc0
)
8 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
9 struct pipe_framebuffer_state
*fb
= &nvc0
->framebuffer
;
10 struct nvc0_surface
*sf
= nvc0_surface(fb
->zsbuf
);
11 struct nvc0_miptree
*mt
= nvc0_miptree(sf
->base
.texture
);
12 struct nouveau_bo
*bo
= mt
->base
.bo
;
/* ZCULL data is placed right after the miptree contents, 128 KiB aligned. */
14 uint32_t offset
= align(mt
->total_size
, 1 << 17);
15 unsigned width
, height
;
/* ZCULL here only supports a single 2D layer (no 3D / array textures). */
17 assert(mt
->base
.base
.depth0
== 1 && mt
->base
.base
.array_size
< 2);
/* NOTE(review): `size` is declared on a missing line; doubling total_size
 * presumably covers both ZCULL address areas A and B -- confirm. */
19 size
= mt
->total_size
* 2;
/* Height padded to 32; width rounded up to a multiple of 224 (the
 * zero-remainder branch is on lines missing from this fragment). */
21 height
= align(fb
->height
, 32);
22 width
= fb
->width
% 224;
24 width
= fb
->width
+ (224 - width
);
/* Emit the ZCULL methods; both address areas are relocated into VRAM
 * with read/write access. */
28 MARK_RING (chan
, 23, 4);
29 BEGIN_RING(chan
, RING_3D_(0x1590), 1); /* ZCULL_REGION_INDEX (bits 0x3f) */
31 BEGIN_RING(chan
, RING_3D_(0x07e8), 2); /* ZCULL_ADDRESS_A_HIGH */
32 OUT_RELOCh(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
33 OUT_RELOCl(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
35 BEGIN_RING(chan
, RING_3D_(0x07f0), 2); /* ZCULL_ADDRESS_B_HIGH */
36 OUT_RELOCh(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
37 OUT_RELOCl(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
/* Region size, split into low and high 16-bit words. */
38 BEGIN_RING(chan
, RING_3D_(0x07e0), 2);
39 OUT_RING (chan
, size
);
40 OUT_RING (chan
, size
>> 16);
41 BEGIN_RING(chan
, RING_3D_(0x15c8), 1); /* bits 0x3 */
43 BEGIN_RING(chan
, RING_3D_(0x07c0), 4); /* ZCULL dimensions */
44 OUT_RING (chan
, width
);
45 OUT_RING (chan
, height
);
48 BEGIN_RING(chan
, RING_3D_(0x15fc), 2);
49 OUT_RING (chan
, 0); /* bits 0xffff */
50 OUT_RING (chan
, 0); /* bits 0xffff */
51 BEGIN_RING(chan
, RING_3D_(0x1958), 1);
52 OUT_RING (chan
, 0); /* bits ~0 */
/*
 * Validate the framebuffer state: bind all color render targets and the
 * depth/stencil (zeta) surface, track their BOs as resident for this frame,
 * and flip GPU read/write status flags so a needed SERIALIZE can be emitted.
 *
 * NOTE(review): mangled fragment -- braces, the use of `serialize`, the
 * `if (fb->zsbuf)` guard and the conditional around the final SERIALIZE
 * are on lines missing from this view.
 */
56 nvc0_validate_fb(struct nvc0_context
*nvc0
)
58 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
59 struct pipe_framebuffer_state
*fb
= &nvc0
->framebuffer
;
61 boolean serialize
= FALSE
;
/* Drop residency info from the previous framebuffer binding. */
63 nvc0_bufctx_reset(nvc0
, NVC0_BUFCTX_FRAME
);
/* NOTE(review): 076543210 is an octal literal -- presumably the identity
 * RT index map packed 3 bits per target; confirm against the 3D class docs. */
65 BEGIN_RING(chan
, RING_3D(RT_CONTROL
), 1);
66 OUT_RING (chan
, (076543210 << 4) | fb
->nr_cbufs
);
67 BEGIN_RING(chan
, RING_3D(SCREEN_SCISSOR_HORIZ
), 2);
68 OUT_RING (chan
, fb
->width
<< 16);
69 OUT_RING (chan
, fb
->height
<< 16);
/* Reserve ring space: 9 words + 2 relocs per color buffer. */
71 MARK_RING(chan
, 9 * fb
->nr_cbufs
, 2 * fb
->nr_cbufs
);
73 for (i
= 0; i
< fb
->nr_cbufs
; ++i
) {
74 struct nvc0_miptree
*mt
= nvc0_miptree(fb
->cbufs
[i
]->texture
);
75 struct nvc0_surface
*sf
= nvc0_surface(fb
->cbufs
[i
]);
76 struct nouveau_bo
*bo
= mt
->base
.bo
;
77 uint32_t offset
= sf
->offset
;
/* RT_ADDRESS_HIGH..: address, size, format, tiling, layer count/stride. */
79 BEGIN_RING(chan
, RING_3D(RT_ADDRESS_HIGH(i
)), 9);
80 OUT_RELOCh(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
81 OUT_RELOCl(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
82 OUT_RING (chan
, sf
->width
);
83 OUT_RING (chan
, sf
->height
);
84 OUT_RING (chan
, nvc0_format_table
[sf
->base
.format
].rt
);
85 OUT_RING (chan
, (mt
->layout_3d
<< 16) |
86 mt
->level
[sf
->base
.u
.tex
.level
].tile_mode
);
87 OUT_RING (chan
, sf
->base
.u
.tex
.first_layer
+ sf
->depth
);
88 OUT_RING (chan
, mt
->layer_stride
>> 2);
89 OUT_RING (chan
, sf
->base
.u
.tex
.first_layer
);
/* Transition the miptree from GPU-read to GPU-write status; a prior
 * read means rendering must be serialized (consequent on missing lines). */
91 if (mt
->base
.status
& NOUVEAU_BUFFER_STATUS_GPU_READING
)
93 mt
->base
.status
|= NOUVEAU_BUFFER_STATUS_GPU_WRITING
;
94 mt
->base
.status
&= ~NOUVEAU_BUFFER_STATUS_GPU_READING
;
/* Keep this BO resident (and relocated) for the frame. */
96 nvc0_bufctx_add_resident(nvc0
, NVC0_BUFCTX_FRAME
, &mt
->base
,
97 NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
/* Depth/stencil surface setup (the enclosing `if (fb->zsbuf)` is on a
 * missing line). */
101 struct nvc0_miptree
*mt
= nvc0_miptree(fb
->zsbuf
->texture
);
102 struct nvc0_surface
*sf
= nvc0_surface(fb
->zsbuf
);
103 struct nouveau_bo
*bo
= mt
->base
.bo
;
/* NOTE(review): `unk` flags plain 2D targets in ZETA_HORIZ bit 16 --
 * exact hardware meaning unconfirmed from this fragment. */
104 int unk
= mt
->base
.base
.target
== PIPE_TEXTURE_2D
;
105 uint32_t offset
= sf
->offset
;
107 MARK_RING (chan
, 12, 2);
108 BEGIN_RING(chan
, RING_3D(ZETA_ADDRESS_HIGH
), 5);
109 OUT_RELOCh(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
110 OUT_RELOCl(chan
, bo
, offset
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
111 OUT_RING (chan
, nvc0_format_table
[fb
->zsbuf
->format
].rt
);
112 OUT_RING (chan
, mt
->level
[sf
->base
.u
.tex
.level
].tile_mode
);
113 OUT_RING (chan
, mt
->layer_stride
>> 2);
114 BEGIN_RING(chan
, RING_3D(ZETA_ENABLE
), 1);
116 BEGIN_RING(chan
, RING_3D(ZETA_HORIZ
), 3);
117 OUT_RING (chan
, sf
->width
);
118 OUT_RING (chan
, sf
->height
);
119 OUT_RING (chan
, (unk
<< 16) |
120 (sf
->base
.u
.tex
.first_layer
+ sf
->depth
));
121 BEGIN_RING(chan
, RING_3D(ZETA_BASE_LAYER
), 1);
122 OUT_RING (chan
, sf
->base
.u
.tex
.first_layer
);
/* Same read->write status transition as for color buffers above. */
124 if (mt
->base
.status
& NOUVEAU_BUFFER_STATUS_GPU_READING
)
126 mt
->base
.status
|= NOUVEAU_BUFFER_STATUS_GPU_WRITING
;
127 mt
->base
.status
&= ~NOUVEAU_BUFFER_STATUS_GPU_READING
;
129 nvc0_bufctx_add_resident(nvc0
, NVC0_BUFCTX_FRAME
, &mt
->base
,
130 NOUVEAU_BO_VRAM
| NOUVEAU_BO_RDWR
);
/* The no-zsbuf path disables zeta; SERIALIZE is emitted when a bound
 * surface was previously read by the GPU (guards on missing lines). */
132 BEGIN_RING(chan
, RING_3D(ZETA_ENABLE
), 1);
137 BEGIN_RING(chan
, RING_3D(SERIALIZE
), 1);
/*
 * Emit the four float components of the constant blend color.
 * NOTE(review): mangled fragment -- function braces are on missing lines.
 */
143 nvc0_validate_blend_colour(struct nvc0_context
*nvc0
)
145 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
/* RGBA, pushed as raw floats via OUT_RINGf. */
147 BEGIN_RING(chan
, RING_3D(BLEND_COLOR(0)), 4);
148 OUT_RINGf (chan
, nvc0
->blend_colour
.color
[0]);
149 OUT_RINGf (chan
, nvc0
->blend_colour
.color
[1]);
150 OUT_RINGf (chan
, nvc0
->blend_colour
.color
[2]);
151 OUT_RINGf (chan
, nvc0
->blend_colour
.color
[3]);
/*
 * Emit front ([0]) and back ([1]) stencil reference values.
 * NOTE(review): mangled fragment -- function braces are on missing lines.
 */
155 nvc0_validate_stencil_ref(struct nvc0_context
*nvc0
)
157 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
159 BEGIN_RING(chan
, RING_3D(STENCIL_FRONT_FUNC_REF
), 1);
160 OUT_RING (chan
, nvc0
->stencil_ref
.ref_value
[0]);
161 BEGIN_RING(chan
, RING_3D(STENCIL_BACK_FUNC_REF
), 1);
162 OUT_RING (chan
, nvc0
->stencil_ref
.ref_value
[1]);
/*
 * Upload the 32x32-bit polygon stipple pattern, byte-swapping each row
 * (util_bswap32) to match the hardware's expected bit order.
 * NOTE(review): mangled fragment -- braces and the declaration of `i`
 * are on missing lines.
 */
166 nvc0_validate_stipple(struct nvc0_context
*nvc0
)
168 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
171 BEGIN_RING(chan
, RING_3D(POLYGON_STIPPLE_PATTERN(0)), 32);
172 for (i
= 0; i
< 32; ++i
)
173 OUT_RING(chan
, util_bswap32(nvc0
->stipple
.stipple
[i
]));
/*
 * Emit the scissor rectangle. Runs when either the scissor rect or the
 * rasterizer's scissor-enable changed; when the rasterizer has scissoring
 * disabled, a full-range (0..0xffff) rectangle is programmed instead.
 * NOTE(review): mangled fragment -- the early `return` after the
 * no-change test and the `else` before the full-range path are on
 * missing lines.
 */
177 nvc0_validate_scissor(struct nvc0_context
*nvc0
)
179 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
180 struct pipe_scissor_state
*s
= &nvc0
->scissor
;
/* Skip if the scissor rect is clean and the enable bit didn't change. */
182 if (!(nvc0
->dirty
& NVC0_NEW_SCISSOR
) &&
183 nvc0
->rast
->pipe
.scissor
== nvc0
->state
.scissor
)
/* Remember the currently programmed enable state. */
185 nvc0
->state
.scissor
= nvc0
->rast
->pipe
.scissor
;
187 BEGIN_RING(chan
, RING_3D(SCISSOR_HORIZ(0)), 2);
188 if (nvc0
->rast
->pipe
.scissor
) {
/* max in the high 16 bits, min in the low 16 bits. */
189 OUT_RING(chan
, (s
->maxx
<< 16) | s
->minx
);
190 OUT_RING(chan
, (s
->maxy
<< 16) | s
->miny
);
/* Scissor disabled: program the maximum rectangle. */
192 OUT_RING(chan
, (0xffff << 16) | 0);
193 OUT_RING(chan
, (0xffff << 16) | 0);
/*
 * Emit the viewport transform (translate + scale), then derive and emit
 * the integer viewport rectangle and the depth range from it.
 * NOTE(review): mangled fragment -- braces and the declarations of
 * x, y, w, h, zmin, zmax are on missing lines.
 */
198 nvc0_validate_viewport(struct nvc0_context
*nvc0
)
200 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
201 struct pipe_viewport_state
*vp
= &nvc0
->viewport
;
205 BEGIN_RING(chan
, RING_3D(VIEWPORT_TRANSLATE_X(0)), 3);
206 OUT_RINGf (chan
, vp
->translate
[0]);
207 OUT_RINGf (chan
, vp
->translate
[1]);
208 OUT_RINGf (chan
, vp
->translate
[2]);
209 BEGIN_RING(chan
, RING_3D(VIEWPORT_SCALE_X(0)), 3);
210 OUT_RINGf (chan
, vp
->scale
[0]);
211 OUT_RINGf (chan
, vp
->scale
[1]);
212 OUT_RINGf (chan
, vp
->scale
[2]);
214 /* now set the viewport rectangle to viewport dimensions for clipping */
/* Rectangle origin = translate - |scale|, extent = 2*|scale| (fabsf
 * handles negated scales, e.g. flipped Y). */
216 x
= (int)(vp
->translate
[0] - fabsf(vp
->scale
[0]));
217 y
= (int)(vp
->translate
[1] - fabsf(vp
->scale
[1]));
218 w
= (int)fabsf(2.0f
* vp
->scale
[0]);
219 h
= (int)fabsf(2.0f
* vp
->scale
[1]);
/* Depth range derived the same way along Z. */
220 zmin
= vp
->translate
[2] - fabsf(vp
->scale
[2]);
221 zmax
= vp
->translate
[2] + fabsf(vp
->scale
[2]);
223 BEGIN_RING(chan
, RING_3D(VIEWPORT_HORIZ(0)), 2);
224 OUT_RING (chan
, (w
<< 16) | x
);
225 OUT_RING (chan
, (h
<< 16) | y
);
226 BEGIN_RING(chan
, RING_3D(DEPTH_RANGE_NEAR(0)), 2);
227 OUT_RINGf (chan
, zmin
);
228 OUT_RINGf (chan
, zmax
);
/*
 * Emit view-volume clip control (depth-clamp on/off), and when user clip
 * planes are set, upload them into the screen's uniform BO (constant
 * buffer area at offset 5<<16) and enable the matching clip distances.
 * NOTE(review): mangled fragment -- braces, the `clip` declaration, the
 * assignment joining the OR'd flag list, and the `if (nvc0->clip.nr)`
 * guard are on missing lines.
 */
232 nvc0_validate_clip(struct nvc0_context
*nvc0
)
234 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
/* Depth clamp enabled: clamp at both near and far planes. */
237 if (nvc0
->clip
.depth_clamp
) {
239 NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1
|
240 NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_NEAR
|
241 NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_FAR
|
242 NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK12_UNK2
;
244 clip
= NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1
;
247 BEGIN_RING(chan
, RING_3D(VIEW_VOLUME_CLIP_CTRL
), 1);
248 OUT_RING (chan
, clip
);
/* User clip plane upload path (guard on a missing line). */
251 struct nouveau_bo
*bo
= nvc0
->screen
->uniforms
;
/* 6 fixed words + 4 floats per clip plane; 2 relocs. */
253 MARK_RING (chan
, 6 + nvc0
->clip
.nr
* 4, 2);
254 BEGIN_RING(chan
, RING_3D(CB_SIZE
), 3);
255 OUT_RING (chan
, 256);
256 OUT_RELOCh(chan
, bo
, 5 << 16, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
257 OUT_RELOCl(chan
, bo
, 5 << 16, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
258 BEGIN_RING_1I(chan
, RING_3D(CB_POS
), nvc0
->clip
.nr
* 4 + 1);
260 OUT_RINGp (chan
, &nvc0
->clip
.ucp
[0][0], nvc0
->clip
.nr
* 4);
/* Enable one clip distance bit per user clip plane. */
262 BEGIN_RING(chan
, RING_3D(VP_CLIP_DISTANCE_ENABLE
), 1);
263 OUT_RING (chan
, (1 << nvc0
->clip
.nr
) - 1);
/* No clip planes: disable all clip distances (else-branch, missing line). */
265 IMMED_RING(chan
, RING_3D(VP_CLIP_DISTANCE_ENABLE
), 0);
/*
 * Push the pre-baked blend state object's method stream verbatim.
 * NOTE(review): mangled fragment -- function braces are on missing lines.
 */
270 nvc0_validate_blend(struct nvc0_context
*nvc0
)
272 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
274 WAIT_RING(chan
, nvc0
->blend
->size
);
275 OUT_RINGp(chan
, nvc0
->blend
->state
, nvc0
->blend
->size
);
/*
 * Push the pre-baked depth/stencil/alpha state object's method stream.
 * NOTE(review): mangled fragment -- function braces are on missing lines.
 */
279 nvc0_validate_zsa(struct nvc0_context
*nvc0
)
281 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
283 WAIT_RING(chan
, nvc0
->zsa
->size
);
284 OUT_RINGp(chan
, nvc0
->zsa
->state
, nvc0
->zsa
->size
);
/*
 * Push the pre-baked rasterizer state object's method stream.
 * NOTE(review): mangled fragment -- function braces are on missing lines.
 */
288 nvc0_validate_rasterizer(struct nvc0_context
*nvc0
)
290 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
292 WAIT_RING(chan
, nvc0
->rast
->size
);
293 OUT_RINGp(chan
, nvc0
->rast
->state
, nvc0
->rast
->size
);
/*
 * (Re)bind dirty constant buffers for all 5 shader stages. Each dirty bit
 * identifies one buffer slot; NULL buffers are unbound, CPU-backed buffers
 * are staged into the screen's uniform BO via M2MF (or pushed inline
 * through CB_POS), GPU-resident ones are bound directly.
 *
 * NOTE(review): heavily gapped fragment -- declarations of `s`, `i`,
 * `base`, several if/else branches, the NULL-buffer path, and the
 * chunked push loop around the CB_POS upload are on missing lines.
 */
297 nvc0_constbufs_validate(struct nvc0_context
*nvc0
)
299 struct nouveau_channel
*chan
= nvc0
->screen
->base
.channel
;
300 struct nouveau_bo
*bo
;
/* One pass per shader stage. */
303 for (s
= 0; s
< 5; ++s
) {
304 struct nv04_resource
*res
;
/* Consume dirty bits one at a time until none remain. */
307 while (nvc0
->constbuf_dirty
[s
]) {
309 unsigned offset
= 0, words
= 0;
310 boolean rebind
= TRUE
;
/* Lowest set bit = next dirty buffer index. */
312 i
= ffs(nvc0
->constbuf_dirty
[s
]) - 1;
313 nvc0
->constbuf_dirty
[s
] &= ~(1 << i
);
315 res
= nv04_resource(nvc0
->constbuf
[s
][i
]);
/* Unbind path: CB_BIND with valid bit clear (guard on missing lines). */
317 BEGIN_RING(chan
, RING_3D(CB_BIND(s
)), 1);
318 OUT_RING (chan
, (i
<< 4) | 0);
320 nvc0
->state
.uniform_buffer_bound
[s
] = 0;
/* CPU-backed (not GPU-mapped) buffer: stage through the uniform BO. */
324 if (!nouveau_resource_mapped_by_gpu(&res
->base
)) {
327 bo
= nvc0
->screen
->uniforms
;
329 if (nvc0
->state
.uniform_buffer_bound
[s
] >= res
->base
.width0
)
332 nvc0
->state
.uniform_buffer_bound
[s
] =
333 align(res
->base
.width0
, 0x100);
/* Copy the user data into VRAM with the memory-to-memory engine. */
338 nvc0_m2mf_push_linear(nvc0
, bo
, NOUVEAU_BO_VRAM
,
339 base
, res
->base
.width0
, res
->data
);
/* NOTE(review): 0x021c / 0x1111 looks like a post-M2MF cache flush --
 * confirm against the nvc0 3D class documentation. */
340 BEGIN_RING(chan
, RING_3D_(0x021c), 1);
341 OUT_RING (chan
, 0x1111);
/* Alternative path: push contents inline, word count from buffer size. */
343 words
= res
->base
.width0
/ 4;
348 nvc0
->state
.uniform_buffer_bound
[s
] = 0;
/* GPU-resident user buffers must be kept resident for relocation. */
351 if (bo
!= nvc0
->screen
->uniforms
)
352 nvc0_bufctx_add_resident(nvc0
, NVC0_BUFCTX_CONSTANT
, res
,
353 NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
/* Bind the buffer: CB_SIZE/CB_ADDRESS then CB_BIND with valid bit set. */
356 MARK_RING (chan
, 4, 2);
357 BEGIN_RING(chan
, RING_3D(CB_SIZE
), 3);
358 OUT_RING (chan
, align(res
->base
.width0
, 0x100));
359 OUT_RELOCh(chan
, bo
, base
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
360 OUT_RELOCl(chan
, bo
, base
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
361 BEGIN_RING(chan
, RING_3D(CB_BIND(s
)), 1);
362 OUT_RING (chan
, (i
<< 4) | 1);
/* Inline push: chunk `words` by available ring space and the PFIFO
 * packet limit (enclosing loop is on missing lines). */
366 unsigned nr
= AVAIL_RING(chan
);
372 nr
= MIN2(MIN2(nr
- 6, words
), NV04_PFIFO_MAX_PACKET_LEN
- 1);
374 MARK_RING (chan
, nr
+ 5, 2);
375 BEGIN_RING(chan
, RING_3D(CB_SIZE
), 3);
376 OUT_RING (chan
, align(res
->base
.width0
, 0x100));
377 OUT_RELOCh(chan
, bo
, base
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
378 OUT_RELOCl(chan
, bo
, base
, NOUVEAU_BO_VRAM
| NOUVEAU_BO_RD
);
379 BEGIN_RING_1I(chan
, RING_3D(CB_POS
), nr
+ 1);
380 OUT_RING (chan
, offset
);
381 OUT_RINGp (chan
, &res
->data
[offset
], nr
);
/*
 * Table of state-validation functions, each paired with the dirty-flag
 * mask that triggers it. Order matters: e.g. the framebuffer is validated
 * before states that depend on bound surfaces.
 * NOTE(review): mangled fragment -- the struct's dirty-mask member
 * declaration (presumably `uint32_t states;`) and the table's closing
 * `};` are on missing lines.
 */
390 static struct state_validate
{
391 void (*func
)(struct nvc0_context
*);
393 } validate_list
[] = {
394 { nvc0_validate_fb
, NVC0_NEW_FRAMEBUFFER
},
395 { nvc0_validate_blend
, NVC0_NEW_BLEND
},
396 { nvc0_validate_zsa
, NVC0_NEW_ZSA
},
397 { nvc0_validate_rasterizer
, NVC0_NEW_RASTERIZER
},
398 { nvc0_validate_blend_colour
, NVC0_NEW_BLEND_COLOUR
},
399 { nvc0_validate_stencil_ref
, NVC0_NEW_STENCIL_REF
},
400 { nvc0_validate_stipple
, NVC0_NEW_STIPPLE
},
401 { nvc0_validate_scissor
, NVC0_NEW_SCISSOR
| NVC0_NEW_RASTERIZER
},
402 { nvc0_validate_viewport
, NVC0_NEW_VIEWPORT
},
403 { nvc0_validate_clip
, NVC0_NEW_CLIP
},
404 { nvc0_vertprog_validate
, NVC0_NEW_VERTPROG
},
405 { nvc0_tctlprog_validate
, NVC0_NEW_TCTLPROG
},
406 { nvc0_tevlprog_validate
, NVC0_NEW_TEVLPROG
},
407 { nvc0_gmtyprog_validate
, NVC0_NEW_GMTYPROG
},
408 { nvc0_fragprog_validate
, NVC0_NEW_FRAGPROG
},
409 { nvc0_constbufs_validate
, NVC0_NEW_CONSTBUF
},
410 { nvc0_validate_textures
, NVC0_NEW_TEXTURES
},
411 { nvc0_validate_samplers
, NVC0_NEW_SAMPLERS
},
412 { nvc0_vertex_arrays_validate
, NVC0_NEW_VERTEX
| NVC0_NEW_ARRAYS
},
413 { nvc0_tfb_validate
, NVC0_NEW_TFB
| NVC0_NEW_TFB_BUFFERS
}
/* Element count of validate_list (sizeof works here: real array, not a
 * decayed pointer). */
415 #define validate_list_len (sizeof(validate_list) / sizeof(validate_list[0]))
418 nvc0_state_validate(struct nvc0_context
*nvc0
)
422 if (nvc0
->screen
->cur_ctx
!= nvc0
) /* FIXME: not everything is valid */
423 nvc0
->dirty
= 0xffffffff;
425 nvc0
->screen
->cur_ctx
= nvc0
;
428 for (i
= 0; i
< validate_list_len
; ++i
) {
429 struct state_validate
*validate
= &validate_list
[i
];
431 if (nvc0
->dirty
& validate
->states
)
432 validate
->func(nvc0
);
437 nvc0_bufctx_emit_relocs(nvc0
);