2 * Copyright (C) 2008-2009 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included
12 * in all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
18 * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
19 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
24 * Richard Li <RichardZ.Li@amd.com>, <richardradeon@gmail.com>
25 * CooperYuan <cooper.yuan@amd.com>, <cooperyuan@gmail.com>
28 #include "main/imports.h"
29 #include "main/glheader.h"
31 #include "r600_context.h"
32 #include "r600_cmdbuf.h"
34 #include "r700_state.h"
36 #include "r700_oglprog.h"
37 #include "r700_fragprog.h"
38 #include "r700_vertprog.h"
39 #include "r700_ioctl.h"
/* Append register 'reg' to the linked list of context states:
 * point the work node at the driver-side copy of the register inside the
 * R700_CHIP_CONTEXT, record the register's dword offset relative to the
 * context register block, pre-chain it to the next array slot, and advance
 * the work pointer.
 *
 * Wrapped in do { } while (0) so the multi-statement expansion behaves as a
 * single statement (safe inside un-braced if/else).
 *
 * NOTE(review): the trailing pStateListWork++ was lost in the corrupted
 * source; it must exist, because the hand-written terminator after the last
 * LINK_STATES() in r700InitChipObject() writes through pStateListWork
 * expecting it to point at a fresh node.
 */
#define LINK_STATES(reg)                                                \
    do {                                                                \
        pStateListWork->puiValue = (unsigned int*)&(r700->reg);         \
        pStateListWork->unOffset = mm##reg - ASIC_CONTEXT_BASE_INDEX;   \
        pStateListWork->pNext    = pStateListWork + 1;                  \
        pStateListWork++;                                               \
    } while (0)
50 GLboolean
r700InitChipObject(context_t
*context
)
52 ContextState
* pStateListWork
;
54 R700_CHIP_CONTEXT
*r700
= &context
->hw
;
57 r700
->pStateList
= (ContextState
*) MALLOC (sizeof(ContextState
)*sizeof(R700_CHIP_CONTEXT
)/sizeof(unsigned int));
58 pStateListWork
= r700
->pStateList
;
61 LINK_STATES(TA_CNTL_AUX
);
62 LINK_STATES(VC_ENHANCE
);
63 LINK_STATES(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ
);
64 LINK_STATES(DB_DEBUG
);
65 LINK_STATES(DB_WATERMARKS
);
68 LINK_STATES(PA_SC_SCREEN_SCISSOR_TL
);
69 LINK_STATES(PA_SC_SCREEN_SCISSOR_BR
);
70 LINK_STATES(PA_SC_WINDOW_OFFSET
);
71 LINK_STATES(PA_SC_WINDOW_SCISSOR_TL
);
72 LINK_STATES(PA_SC_WINDOW_SCISSOR_BR
);
73 LINK_STATES(PA_SC_CLIPRECT_RULE
);
74 LINK_STATES(PA_SC_CLIPRECT_0_TL
);
75 LINK_STATES(PA_SC_CLIPRECT_0_BR
);
76 LINK_STATES(PA_SC_CLIPRECT_1_TL
);
77 LINK_STATES(PA_SC_CLIPRECT_1_BR
);
78 LINK_STATES(PA_SC_CLIPRECT_2_TL
);
79 LINK_STATES(PA_SC_CLIPRECT_2_BR
);
80 LINK_STATES(PA_SC_CLIPRECT_3_TL
);
81 LINK_STATES(PA_SC_CLIPRECT_3_BR
);
82 LINK_STATES(PA_SC_EDGERULE
);
83 LINK_STATES(PA_SC_GENERIC_SCISSOR_TL
);
84 LINK_STATES(PA_SC_GENERIC_SCISSOR_BR
);
85 LINK_STATES(PA_SC_LINE_STIPPLE
);
86 LINK_STATES(PA_SC_MPASS_PS_CNTL
);
87 LINK_STATES(PA_SC_MODE_CNTL
);
88 LINK_STATES(PA_SC_LINE_CNTL
);
89 LINK_STATES(PA_SC_AA_CONFIG
);
90 LINK_STATES(PA_SC_AA_SAMPLE_LOCS_MCTX
);
91 LINK_STATES(PA_SC_AA_SAMPLE_LOCS_8S_WD1_MCTX
);
92 LINK_STATES(PA_SC_AA_MASK
);
95 LINK_STATES(PA_SU_POINT_SIZE
);
96 LINK_STATES(PA_SU_POINT_MINMAX
);
97 LINK_STATES(PA_SU_LINE_CNTL
);
98 LINK_STATES(PA_SU_SC_MODE_CNTL
);
99 LINK_STATES(PA_SU_VTX_CNTL
);
100 LINK_STATES(PA_SU_POLY_OFFSET_DB_FMT_CNTL
);
101 LINK_STATES(PA_SU_POLY_OFFSET_CLAMP
);
102 LINK_STATES(PA_SU_POLY_OFFSET_FRONT_SCALE
);
103 LINK_STATES(PA_SU_POLY_OFFSET_FRONT_OFFSET
);
104 LINK_STATES(PA_SU_POLY_OFFSET_BACK_SCALE
);
105 LINK_STATES(PA_SU_POLY_OFFSET_BACK_OFFSET
);
108 LINK_STATES(PA_CL_CLIP_CNTL
);
109 LINK_STATES(PA_CL_VTE_CNTL
);
110 LINK_STATES(PA_CL_VS_OUT_CNTL
);
111 LINK_STATES(PA_CL_NANINF_CNTL
);
112 LINK_STATES(PA_CL_GB_VERT_CLIP_ADJ
);
113 LINK_STATES(PA_CL_GB_VERT_DISC_ADJ
);
114 LINK_STATES(PA_CL_GB_HORZ_CLIP_ADJ
);
115 LINK_STATES(PA_CL_GB_HORZ_DISC_ADJ
);
118 LINK_STATES(CB_CLEAR_RED_R6XX
);
119 LINK_STATES(CB_CLEAR_GREEN_R6XX
);
120 LINK_STATES(CB_CLEAR_BLUE_R6XX
);
121 LINK_STATES(CB_CLEAR_ALPHA_R6XX
);
122 LINK_STATES(CB_TARGET_MASK
);
123 LINK_STATES(CB_SHADER_MASK
);
124 LINK_STATES(CB_BLEND_RED
);
125 LINK_STATES(CB_BLEND_GREEN
);
126 LINK_STATES(CB_BLEND_BLUE
);
127 LINK_STATES(CB_BLEND_ALPHA
);
128 LINK_STATES(CB_FOG_RED_R6XX
);
129 LINK_STATES(CB_FOG_GREEN_R6XX
);
130 LINK_STATES(CB_FOG_BLUE_R6XX
);
131 LINK_STATES(CB_SHADER_CONTROL
);
132 LINK_STATES(CB_COLOR_CONTROL
);
133 LINK_STATES(CB_CLRCMP_CONTROL
);
134 LINK_STATES(CB_CLRCMP_SRC
);
135 LINK_STATES(CB_CLRCMP_DST
);
136 LINK_STATES(CB_CLRCMP_MSK
);
137 LINK_STATES(CB_BLEND_CONTROL
);
140 LINK_STATES(SX_MISC
);
141 LINK_STATES(SX_ALPHA_TEST_CONTROL
);
142 LINK_STATES(SX_ALPHA_REF
);
145 LINK_STATES(VGT_MAX_VTX_INDX
);
146 LINK_STATES(VGT_MIN_VTX_INDX
);
147 LINK_STATES(VGT_INDX_OFFSET
);
148 LINK_STATES(VGT_MULTI_PRIM_IB_RESET_INDX
);
149 LINK_STATES(VGT_OUTPUT_PATH_CNTL
);
150 LINK_STATES(VGT_HOS_CNTL
);
151 LINK_STATES(VGT_HOS_MAX_TESS_LEVEL
);
152 LINK_STATES(VGT_HOS_MIN_TESS_LEVEL
);
153 LINK_STATES(VGT_HOS_REUSE_DEPTH
);
154 LINK_STATES(VGT_GROUP_PRIM_TYPE
);
155 LINK_STATES(VGT_GROUP_FIRST_DECR
);
156 LINK_STATES(VGT_GROUP_DECR
);
157 LINK_STATES(VGT_GROUP_VECT_0_CNTL
);
158 LINK_STATES(VGT_GROUP_VECT_1_CNTL
);
159 LINK_STATES(VGT_GROUP_VECT_0_FMT_CNTL
);
160 LINK_STATES(VGT_GROUP_VECT_1_FMT_CNTL
);
161 LINK_STATES(VGT_GS_MODE
);
162 LINK_STATES(VGT_PRIMITIVEID_EN
);
163 LINK_STATES(VGT_MULTI_PRIM_IB_RESET_EN
);
164 LINK_STATES(VGT_INSTANCE_STEP_RATE_0
);
165 LINK_STATES(VGT_INSTANCE_STEP_RATE_1
);
166 LINK_STATES(VGT_STRMOUT_EN
);
167 LINK_STATES(VGT_REUSE_OFF
);
168 LINK_STATES(VGT_VTX_CNT_EN
);
169 LINK_STATES(VGT_STRMOUT_BUFFER_EN
);
171 LINK_STATES(SQ_VTX_SEMANTIC_0
);
172 LINK_STATES(SQ_VTX_SEMANTIC_1
);
173 LINK_STATES(SQ_VTX_SEMANTIC_2
);
174 LINK_STATES(SQ_VTX_SEMANTIC_3
);
175 LINK_STATES(SQ_VTX_SEMANTIC_4
);
176 LINK_STATES(SQ_VTX_SEMANTIC_5
);
177 LINK_STATES(SQ_VTX_SEMANTIC_6
);
178 LINK_STATES(SQ_VTX_SEMANTIC_7
);
179 LINK_STATES(SQ_VTX_SEMANTIC_8
);
180 LINK_STATES(SQ_VTX_SEMANTIC_9
);
181 LINK_STATES(SQ_VTX_SEMANTIC_10
);
182 LINK_STATES(SQ_VTX_SEMANTIC_11
);
183 LINK_STATES(SQ_VTX_SEMANTIC_12
);
184 LINK_STATES(SQ_VTX_SEMANTIC_13
);
185 LINK_STATES(SQ_VTX_SEMANTIC_14
);
186 LINK_STATES(SQ_VTX_SEMANTIC_15
);
187 LINK_STATES(SQ_VTX_SEMANTIC_16
);
188 LINK_STATES(SQ_VTX_SEMANTIC_17
);
189 LINK_STATES(SQ_VTX_SEMANTIC_18
);
190 LINK_STATES(SQ_VTX_SEMANTIC_19
);
191 LINK_STATES(SQ_VTX_SEMANTIC_20
);
192 LINK_STATES(SQ_VTX_SEMANTIC_21
);
193 LINK_STATES(SQ_VTX_SEMANTIC_22
);
194 LINK_STATES(SQ_VTX_SEMANTIC_23
);
195 LINK_STATES(SQ_VTX_SEMANTIC_24
);
196 LINK_STATES(SQ_VTX_SEMANTIC_25
);
197 LINK_STATES(SQ_VTX_SEMANTIC_26
);
198 LINK_STATES(SQ_VTX_SEMANTIC_27
);
199 LINK_STATES(SQ_VTX_SEMANTIC_28
);
200 LINK_STATES(SQ_VTX_SEMANTIC_29
);
201 LINK_STATES(SQ_VTX_SEMANTIC_30
);
202 LINK_STATES(SQ_VTX_SEMANTIC_31
);
205 LINK_STATES(SPI_VS_OUT_ID_0
);
206 LINK_STATES(SPI_VS_OUT_ID_1
);
207 LINK_STATES(SPI_VS_OUT_ID_2
);
208 LINK_STATES(SPI_VS_OUT_ID_3
);
209 LINK_STATES(SPI_VS_OUT_ID_4
);
210 LINK_STATES(SPI_VS_OUT_ID_5
);
211 LINK_STATES(SPI_VS_OUT_ID_6
);
212 LINK_STATES(SPI_VS_OUT_ID_7
);
213 LINK_STATES(SPI_VS_OUT_ID_8
);
214 LINK_STATES(SPI_VS_OUT_ID_9
);
216 LINK_STATES(SPI_VS_OUT_CONFIG
);
217 LINK_STATES(SPI_THREAD_GROUPING
);
218 LINK_STATES(SPI_PS_IN_CONTROL_0
);
219 LINK_STATES(SPI_PS_IN_CONTROL_1
);
220 LINK_STATES(SPI_INTERP_CONTROL_0
);
221 LINK_STATES(SPI_INPUT_Z
);
222 LINK_STATES(SPI_FOG_CNTL
);
223 LINK_STATES(SPI_FOG_FUNC_SCALE
);
224 LINK_STATES(SPI_FOG_FUNC_BIAS
);
227 LINK_STATES(SQ_ESGS_RING_ITEMSIZE
);
228 LINK_STATES(SQ_GSVS_RING_ITEMSIZE
);
229 LINK_STATES(SQ_ESTMP_RING_ITEMSIZE
);
230 LINK_STATES(SQ_GSTMP_RING_ITEMSIZE
);
231 LINK_STATES(SQ_VSTMP_RING_ITEMSIZE
);
232 LINK_STATES(SQ_PSTMP_RING_ITEMSIZE
);
233 LINK_STATES(SQ_FBUF_RING_ITEMSIZE
);
234 LINK_STATES(SQ_REDUC_RING_ITEMSIZE
);
235 //LINK_STATES(SQ_GS_VERT_ITEMSIZE);
237 pStateListWork
->puiValue
= (unsigned int*)&(r700
->SQ_GS_VERT_ITEMSIZE
);
238 pStateListWork
->unOffset
= mmSQ_GS_VERT_ITEMSIZE
- ASIC_CONTEXT_BASE_INDEX
;
239 pStateListWork
->pNext
= NULL
; /* END OF STATE LIST */
244 void r700SetupVTXConstants(GLcontext
* ctx
,
245 unsigned int nStreamID
,
247 unsigned int size
, /* number of elements in vector */
249 unsigned int count
) /* number of vectors in stream */
251 context_t
*context
= R700_CONTEXT(ctx
);
253 struct radeon_aos
* paos
= (struct radeon_aos
*)pAos
;
254 offset_modifiers offset_mod
= {NO_SHIFT
, 0, 0xFFFFFFFF};
256 BATCH_LOCALS(&context
->radeon
);
258 unsigned int uSQ_VTX_CONSTANT_WORD0_0
;
259 unsigned int uSQ_VTX_CONSTANT_WORD1_0
;
260 unsigned int uSQ_VTX_CONSTANT_WORD2_0
= 0;
261 unsigned int uSQ_VTX_CONSTANT_WORD3_0
= 0;
262 unsigned int uSQ_VTX_CONSTANT_WORD6_0
= 0;
264 if ((context
->radeon
.radeonScreen
->chip_family
== CHIP_FAMILY_RV610
) ||
265 (context
->radeon
.radeonScreen
->chip_family
== CHIP_FAMILY_RV620
) ||
266 (context
->radeon
.radeonScreen
->chip_family
== CHIP_FAMILY_RS780
) ||
267 (context
->radeon
.radeonScreen
->chip_family
== CHIP_FAMILY_RV710
))
268 r700SyncSurf(context
, paos
->bo
, RADEON_GEM_DOMAIN_GTT
, 0, TC_ACTION_ENA_bit
);
270 r700SyncSurf(context
, paos
->bo
, RADEON_GEM_DOMAIN_GTT
, 0, VC_ACTION_ENA_bit
);
272 uSQ_VTX_CONSTANT_WORD0_0
= paos
->offset
;
273 uSQ_VTX_CONSTANT_WORD1_0
= count
* (size
* 4) - 1;
275 SETfield(uSQ_VTX_CONSTANT_WORD2_0
, 0, BASE_ADDRESS_HI_shift
, BASE_ADDRESS_HI_mask
); /* TODO */
276 SETfield(uSQ_VTX_CONSTANT_WORD2_0
, stride
, SQ_VTX_CONSTANT_WORD2_0__STRIDE_shift
,
277 SQ_VTX_CONSTANT_WORD2_0__STRIDE_mask
);
278 SETfield(uSQ_VTX_CONSTANT_WORD2_0
, GetSurfaceFormat(GL_FLOAT
, size
, NULL
),
279 SQ_VTX_CONSTANT_WORD2_0__DATA_FORMAT_shift
,
280 SQ_VTX_CONSTANT_WORD2_0__DATA_FORMAT_mask
); /* TODO : trace back api for initial data type, not only GL_FLOAT */
281 SETfield(uSQ_VTX_CONSTANT_WORD2_0
, SQ_NUM_FORMAT_SCALED
,
282 SQ_VTX_CONSTANT_WORD2_0__NUM_FORMAT_ALL_shift
, SQ_VTX_CONSTANT_WORD2_0__NUM_FORMAT_ALL_mask
);
283 SETbit(uSQ_VTX_CONSTANT_WORD2_0
, SQ_VTX_CONSTANT_WORD2_0__FORMAT_COMP_ALL_bit
);
285 SETfield(uSQ_VTX_CONSTANT_WORD3_0
, 1, MEM_REQUEST_SIZE_shift
, MEM_REQUEST_SIZE_mask
);
286 SETfield(uSQ_VTX_CONSTANT_WORD6_0
, SQ_TEX_VTX_VALID_BUFFER
,
287 SQ_TEX_RESOURCE_WORD6_0__TYPE_shift
, SQ_TEX_RESOURCE_WORD6_0__TYPE_mask
);
289 BEGIN_BATCH_NO_AUTOSTATE(9);
291 R600_OUT_BATCH(CP_PACKET3(R600_IT_SET_RESOURCE
, 7));
292 R600_OUT_BATCH((nStreamID
+ SQ_FETCH_RESOURCE_VS_OFFSET
) * FETCH_RESOURCE_STRIDE
);
294 R600_OUT_BATCH_RELOC(uSQ_VTX_CONSTANT_WORD0_0
,
296 uSQ_VTX_CONSTANT_WORD0_0
,
297 RADEON_GEM_DOMAIN_GTT
, 0, 0, &offset_mod
);
298 R600_OUT_BATCH(uSQ_VTX_CONSTANT_WORD1_0
);
299 R600_OUT_BATCH(uSQ_VTX_CONSTANT_WORD2_0
);
300 R600_OUT_BATCH(uSQ_VTX_CONSTANT_WORD3_0
);
303 R600_OUT_BATCH(uSQ_VTX_CONSTANT_WORD6_0
);
310 int r700SetupStreams(GLcontext
* ctx
)
312 context_t
*context
= R700_CONTEXT(ctx
);
314 BATCH_LOCALS(&context
->radeon
);
316 struct r700_vertex_program
*vpc
317 = (struct r700_vertex_program
*)ctx
->VertexProgram
._Current
;
319 TNLcontext
*tnl
= TNL_CONTEXT(ctx
);
320 struct vertex_buffer
*vb
= &tnl
->vb
;
323 unsigned int i
, j
= 0;
325 BEGIN_BATCH_NO_AUTOSTATE(6);
326 R600_OUT_BATCH(CP_PACKET3(R600_IT_SET_CTL_CONST
, 1));
327 R600_OUT_BATCH(mmSQ_VTX_BASE_VTX_LOC
- ASIC_CTL_CONST_BASE_INDEX
);
330 R600_OUT_BATCH(CP_PACKET3(R600_IT_SET_CTL_CONST
, 1));
331 R600_OUT_BATCH(mmSQ_VTX_START_INST_LOC
- ASIC_CTL_CONST_BASE_INDEX
);
336 context
->radeon
.tcl
.aos_count
= 0;
337 for(i
=0; i
<VERT_ATTRIB_MAX
; i
++)
340 if(vpc
->mesa_program
.Base
.InputsRead
& unBit
)
342 rcommon_emit_vector(ctx
,
343 &context
->radeon
.tcl
.aos
[j
],
344 vb
->AttribPtr
[i
]->data
,
345 vb
->AttribPtr
[i
]->size
,
346 vb
->AttribPtr
[i
]->stride
,
349 /* currently aos are packed */
350 r700SetupVTXConstants(ctx
,
352 (void*)(&context
->radeon
.tcl
.aos
[j
]),
353 (unsigned int)context
->radeon
.tcl
.aos
[j
].components
,
354 (unsigned int)context
->radeon
.tcl
.aos
[j
].stride
* 4,
355 (unsigned int)context
->radeon
.tcl
.aos
[j
].count
);
357 context
->radeon
.tcl
.aos_count
++;
361 return R600_FALLBACK_NONE
;
364 GLboolean
r700SendContextStates(context_t
*context
)
366 BATCH_LOCALS(&context
->radeon
);
368 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
370 ContextState
* pState
= r700
->pStateList
;
371 ContextState
* pInit
;
375 while(NULL
!= pState
)
381 while(NULL
!= pState
->pNext
)
383 if ((pState
->pNext
->unOffset
- pState
->unOffset
) > 1)
389 pState
= pState
->pNext
;
394 pState
= pState
->pNext
;
396 BEGIN_BATCH_NO_AUTOSTATE(toSend
+ 2);
397 R600_OUT_BATCH_REGSEQ(((pInit
->unOffset
+ ASIC_CONTEXT_BASE_INDEX
)<<2), toSend
);
398 for(ui
=0; ui
<toSend
; ui
++)
400 R600_OUT_BATCH(*(pInit
->puiValue
));
401 pInit
= pInit
->pNext
;
407 * - split this into a separate function?
408 * - only emit the ones we use
410 BEGIN_BATCH_NO_AUTOSTATE(2 + R700_MAX_SHADER_EXPORTS
);
411 R600_OUT_BATCH_REGSEQ(SPI_PS_INPUT_CNTL_0
, R700_MAX_SHADER_EXPORTS
);
412 for(ui
= 0; ui
< R700_MAX_SHADER_EXPORTS
; ui
++)
413 R600_OUT_BATCH(r700
->SPI_PS_INPUT_CNTL
[ui
].u32All
);
420 GLboolean
r700SendDepthTargetState(context_t
*context
, int id
)
422 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
423 struct radeon_renderbuffer
*rrb
;
424 offset_modifiers offset_mod
;
425 BATCH_LOCALS(&context
->radeon
);
427 rrb
= radeon_get_depthbuffer(&context
->radeon
);
428 if (!rrb
|| !rrb
->bo
) {
429 fprintf(stderr
, "no rrb\n");
433 offset_mod
.shift
= NO_SHIFT
;
434 offset_mod
.shiftbits
= 0;
435 offset_mod
.mask
= 0xFFFFFFFF;
437 BEGIN_BATCH_NO_AUTOSTATE(9);
438 R600_OUT_BATCH_REGSEQ(DB_DEPTH_SIZE
, 2);
439 R600_OUT_BATCH(r700
->DB_DEPTH_SIZE
.u32All
);
440 R600_OUT_BATCH(r700
->DB_DEPTH_VIEW
.u32All
);
441 R600_OUT_BATCH_REGSEQ(DB_DEPTH_BASE
, 3);
442 R600_OUT_BATCH_RELOC(r700
->DB_DEPTH_BASE
.u32All
,
444 r700
->DB_DEPTH_BASE
.u32All
,
445 0, RADEON_GEM_DOMAIN_VRAM
, 0, &offset_mod
);
446 R600_OUT_BATCH(r700
->DB_DEPTH_INFO
.u32All
);
447 R600_OUT_BATCH(r700
->DB_HTILE_DATA_BASE
.u32All
);
450 BEGIN_BATCH_NO_AUTOSTATE(24);
451 R600_OUT_BATCH_REGSEQ(DB_STENCIL_CLEAR
, 2);
452 R600_OUT_BATCH(r700
->DB_STENCIL_CLEAR
.u32All
);
453 R600_OUT_BATCH(r700
->DB_DEPTH_CLEAR
.u32All
);
455 R600_OUT_BATCH_REGSEQ(DB_STENCILREFMASK
, 2);
456 R600_OUT_BATCH(r700
->DB_STENCILREFMASK
.u32All
);
457 R600_OUT_BATCH(r700
->DB_STENCILREFMASK_BF
.u32All
);
459 R600_OUT_BATCH_REGVAL(DB_DEPTH_CONTROL
, r700
->DB_DEPTH_CONTROL
.u32All
);
460 R600_OUT_BATCH_REGVAL(DB_SHADER_CONTROL
, r700
->DB_SHADER_CONTROL
.u32All
);
462 R600_OUT_BATCH_REGSEQ(DB_RENDER_CONTROL
, 2);
463 R600_OUT_BATCH(r700
->DB_RENDER_CONTROL
.u32All
);
464 R600_OUT_BATCH(r700
->DB_RENDER_OVERRIDE
.u32All
);
466 R600_OUT_BATCH_REGVAL(DB_HTILE_SURFACE
, r700
->DB_HTILE_SURFACE
.u32All
);
467 R600_OUT_BATCH_REGVAL(DB_ALPHA_TO_MASK
, r700
->DB_ALPHA_TO_MASK
.u32All
);
472 r700SyncSurf(context
, rrb
->bo
, 0, RADEON_GEM_DOMAIN_VRAM
,
473 DB_ACTION_ENA_bit
| DB_DEST_BASE_ENA_bit
);
478 GLboolean
r700SendRenderTargetState(context_t
*context
, int id
)
480 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
481 struct radeon_renderbuffer
*rrb
;
482 offset_modifiers offset_mod
;
483 BATCH_LOCALS(&context
->radeon
);
485 rrb
= radeon_get_colorbuffer(&context
->radeon
);
486 if (!rrb
|| !rrb
->bo
) {
487 fprintf(stderr
, "no rrb\n");
491 if (id
> R700_MAX_RENDER_TARGETS
)
494 if (!r700
->render_target
[id
].enabled
)
497 offset_mod
.shift
= NO_SHIFT
;
498 offset_mod
.shiftbits
= 0;
499 offset_mod
.mask
= 0xFFFFFFFF;
501 BEGIN_BATCH_NO_AUTOSTATE(3);
502 R600_OUT_BATCH_REGSEQ(CB_COLOR0_BASE
+ (4 * id
), 1);
503 R600_OUT_BATCH_RELOC(r700
->render_target
[id
].CB_COLOR0_BASE
.u32All
,
505 r700
->render_target
[id
].CB_COLOR0_BASE
.u32All
,
506 0, RADEON_GEM_DOMAIN_VRAM
, 0, &offset_mod
);
509 if ((context
->radeon
.radeonScreen
->chip_family
> CHIP_FAMILY_R600
) &&
510 (context
->radeon
.radeonScreen
->chip_family
< CHIP_FAMILY_RV770
)) {
511 BEGIN_BATCH_NO_AUTOSTATE(2);
512 R600_OUT_BATCH(CP_PACKET3(R600_IT_SURFACE_BASE_UPDATE
, 0));
513 R600_OUT_BATCH((2 << id
));
517 BEGIN_BATCH_NO_AUTOSTATE(18);
518 R600_OUT_BATCH_REGVAL(CB_COLOR0_SIZE
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_SIZE
.u32All
);
519 R600_OUT_BATCH_REGVAL(CB_COLOR0_VIEW
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_VIEW
.u32All
);
520 R600_OUT_BATCH_REGVAL(CB_COLOR0_INFO
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_INFO
.u32All
);
521 R600_OUT_BATCH_REGVAL(CB_COLOR0_TILE
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_TILE
.u32All
);
522 R600_OUT_BATCH_REGVAL(CB_COLOR0_FRAG
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_FRAG
.u32All
);
523 R600_OUT_BATCH_REGVAL(CB_COLOR0_MASK
+ (4 * id
), r700
->render_target
[id
].CB_COLOR0_MASK
.u32All
);
526 if (context
->radeon
.radeonScreen
->chip_family
> CHIP_FAMILY_R600
) {
527 BEGIN_BATCH_NO_AUTOSTATE(3);
528 R600_OUT_BATCH_REGVAL(CB_BLEND0_CONTROL
+ (4 * id
), r700
->render_target
[id
].CB_BLEND0_CONTROL
.u32All
);
534 r700SyncSurf(context
, rrb
->bo
, 0, RADEON_GEM_DOMAIN_VRAM
,
535 CB_ACTION_ENA_bit
| (1 << (id
+ 6)));
540 GLboolean
r700SendPSState(context_t
*context
)
542 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
543 struct radeon_renderbuffer
*rrb
;
544 struct radeon_bo
* pbo
;
545 offset_modifiers offset_mod
;
546 BATCH_LOCALS(&context
->radeon
);
548 pbo
= (struct radeon_bo
*)r700GetActiveFpShaderBo(GL_CONTEXT(context
));
550 offset_mod
.shift
= NO_SHIFT
;
551 offset_mod
.shiftbits
= 0;
552 offset_mod
.mask
= 0xFFFFFFFF;
554 r700SyncSurf(context
, pbo
, RADEON_GEM_DOMAIN_GTT
, 0, SH_ACTION_ENA_bit
);
556 BEGIN_BATCH_NO_AUTOSTATE(3);
557 R600_OUT_BATCH_REGSEQ(SQ_PGM_START_PS
, 1);
558 R600_OUT_BATCH_RELOC(r700
->ps
.SQ_PGM_START_PS
.u32All
,
560 r700
->ps
.SQ_PGM_START_PS
.u32All
,
561 RADEON_GEM_DOMAIN_GTT
, 0, 0, &offset_mod
);
564 BEGIN_BATCH_NO_AUTOSTATE(9);
565 R600_OUT_BATCH_REGVAL(SQ_PGM_RESOURCES_PS
, r700
->ps
.SQ_PGM_RESOURCES_PS
.u32All
);
566 R600_OUT_BATCH_REGVAL(SQ_PGM_EXPORTS_PS
, r700
->ps
.SQ_PGM_EXPORTS_PS
.u32All
);
567 R600_OUT_BATCH_REGVAL(SQ_PGM_CF_OFFSET_PS
, r700
->ps
.SQ_PGM_CF_OFFSET_PS
.u32All
);
575 GLboolean
r700SendVSState(context_t
*context
)
577 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
578 struct radeon_renderbuffer
*rrb
;
579 struct radeon_bo
* pbo
;
580 offset_modifiers offset_mod
;
581 BATCH_LOCALS(&context
->radeon
);
583 pbo
= (struct radeon_bo
*)r700GetActiveVpShaderBo(GL_CONTEXT(context
));
585 offset_mod
.shift
= NO_SHIFT
;
586 offset_mod
.shiftbits
= 0;
587 offset_mod
.mask
= 0xFFFFFFFF;
589 r700SyncSurf(context
, pbo
, RADEON_GEM_DOMAIN_GTT
, 0, SH_ACTION_ENA_bit
);
591 BEGIN_BATCH_NO_AUTOSTATE(3);
592 R600_OUT_BATCH_REGSEQ(SQ_PGM_START_VS
, 1);
593 R600_OUT_BATCH_RELOC(r700
->vs
.SQ_PGM_START_VS
.u32All
,
595 r700
->vs
.SQ_PGM_START_VS
.u32All
,
596 RADEON_GEM_DOMAIN_GTT
, 0, 0, &offset_mod
);
599 BEGIN_BATCH_NO_AUTOSTATE(6);
600 R600_OUT_BATCH_REGVAL(SQ_PGM_RESOURCES_VS
, r700
->vs
.SQ_PGM_RESOURCES_VS
.u32All
);
601 R600_OUT_BATCH_REGVAL(SQ_PGM_CF_OFFSET_VS
, r700
->vs
.SQ_PGM_CF_OFFSET_VS
.u32All
);
609 GLboolean
r700SendFSState(context_t
*context
)
611 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
612 struct radeon_renderbuffer
*rrb
;
613 struct radeon_bo
* pbo
;
614 offset_modifiers offset_mod
;
615 BATCH_LOCALS(&context
->radeon
);
618 * R6xx chips require a FS be emitted, even if it's not used.
619 * since we aren't using FS yet, just send the VS address to make
620 * the kernel command checker happy
622 pbo
= (struct radeon_bo
*)r700GetActiveVpShaderBo(GL_CONTEXT(context
));
623 r700
->fs
.SQ_PGM_START_FS
.u32All
= r700
->vs
.SQ_PGM_START_VS
.u32All
;
624 r700
->fs
.SQ_PGM_RESOURCES_FS
.u32All
= 0;
625 r700
->fs
.SQ_PGM_CF_OFFSET_FS
.u32All
= 0;
628 offset_mod
.shift
= NO_SHIFT
;
629 offset_mod
.shiftbits
= 0;
630 offset_mod
.mask
= 0xFFFFFFFF;
632 r700SyncSurf(context
, pbo
, RADEON_GEM_DOMAIN_GTT
, 0, SH_ACTION_ENA_bit
);
634 BEGIN_BATCH_NO_AUTOSTATE(3);
635 R600_OUT_BATCH_REGSEQ(SQ_PGM_START_FS
, 1);
636 R600_OUT_BATCH_RELOC(r700
->fs
.SQ_PGM_START_FS
.u32All
,
638 r700
->fs
.SQ_PGM_START_FS
.u32All
,
639 RADEON_GEM_DOMAIN_GTT
, 0, 0, &offset_mod
);
642 BEGIN_BATCH_NO_AUTOSTATE(6);
643 R600_OUT_BATCH_REGVAL(SQ_PGM_RESOURCES_FS
, r700
->fs
.SQ_PGM_RESOURCES_FS
.u32All
);
644 R600_OUT_BATCH_REGVAL(SQ_PGM_CF_OFFSET_FS
, r700
->fs
.SQ_PGM_CF_OFFSET_FS
.u32All
);
652 GLboolean
r700SendViewportState(context_t
*context
, int id
)
654 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
655 struct radeon_renderbuffer
*rrb
;
656 offset_modifiers offset_mod
;
657 BATCH_LOCALS(&context
->radeon
);
659 if (id
> R700_MAX_VIEWPORTS
)
662 if (!r700
->viewport
[id
].enabled
)
665 BEGIN_BATCH_NO_AUTOSTATE(16);
666 R600_OUT_BATCH_REGSEQ(PA_SC_VPORT_SCISSOR_0_TL
+ (8 * id
), 2);
667 R600_OUT_BATCH(r700
->viewport
[id
].PA_SC_VPORT_SCISSOR_0_TL
.u32All
);
668 R600_OUT_BATCH(r700
->viewport
[id
].PA_SC_VPORT_SCISSOR_0_BR
.u32All
);
669 R600_OUT_BATCH_REGSEQ(PA_SC_VPORT_ZMIN_0
+ (8 * id
), 2);
670 R600_OUT_BATCH(r700
->viewport
[id
].PA_SC_VPORT_ZMIN_0
.u32All
);
671 R600_OUT_BATCH(r700
->viewport
[id
].PA_SC_VPORT_ZMAX_0
.u32All
);
672 R600_OUT_BATCH_REGSEQ(PA_CL_VPORT_XSCALE_0
+ (24 * id
), 6);
673 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_XSCALE
.u32All
);
674 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_XOFFSET
.u32All
);
675 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_YSCALE
.u32All
);
676 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_YOFFSET
.u32All
);
677 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_ZSCALE
.u32All
);
678 R600_OUT_BATCH(r700
->viewport
[id
].PA_CL_VPORT_ZOFFSET
.u32All
);
686 GLboolean
r700SendSQConfig(context_t
*context
)
688 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
689 BATCH_LOCALS(&context
->radeon
);
691 BEGIN_BATCH_NO_AUTOSTATE(8);
692 R600_OUT_BATCH_REGSEQ(SQ_CONFIG
, 6);
693 R600_OUT_BATCH(r700
->sq_config
.SQ_CONFIG
.u32All
);
694 R600_OUT_BATCH(r700
->sq_config
.SQ_GPR_RESOURCE_MGMT_1
.u32All
);
695 R600_OUT_BATCH(r700
->sq_config
.SQ_GPR_RESOURCE_MGMT_2
.u32All
);
696 R600_OUT_BATCH(r700
->sq_config
.SQ_THREAD_RESOURCE_MGMT
.u32All
);
697 R600_OUT_BATCH(r700
->sq_config
.SQ_STACK_RESOURCE_MGMT_1
.u32All
);
698 R600_OUT_BATCH(r700
->sq_config
.SQ_STACK_RESOURCE_MGMT_2
.u32All
);
705 GLboolean
r700SendUCPState(context_t
*context
)
707 R700_CHIP_CONTEXT
*r700
= R700_CONTEXT_STATES(context
);
708 BATCH_LOCALS(&context
->radeon
);
711 for (i
= 0; i
< R700_MAX_UCP
; i
++) {
712 if (r700
->ucp
[i
].enabled
) {
713 BEGIN_BATCH_NO_AUTOSTATE(6);
714 R600_OUT_BATCH_REGSEQ(PA_CL_UCP_0_X
+ (16 * i
), 4);
715 R600_OUT_BATCH(r700
->ucp
[i
].PA_CL_UCP_0_X
.u32All
);
716 R600_OUT_BATCH(r700
->ucp
[i
].PA_CL_UCP_0_Y
.u32All
);
717 R600_OUT_BATCH(r700
->ucp
[i
].PA_CL_UCP_0_Z
.u32All
);
718 R600_OUT_BATCH(r700
->ucp
[i
].PA_CL_UCP_0_W
.u32All
);