src/gallium/drivers/r600/r600_asm.c
/*
 * Copyright 2010 Jerome Glisse <glisse@freedesktop.org>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <stdio.h>
#include <errno.h>
#include "util/u_memory.h"
#include "pipe/p_shader_tokens.h"
#include "r600_pipe.h"
#include "r600_sq.h"
#include "r600_opcodes.h"
#include "r600_asm.h"

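/* Number of source operands consumed by an ALU instruction: OP3 opcodes
 * always take three sources, while OP2 opcodes take 0, 1 or 2 depending on
 * the instruction. Unknown opcodes fall through to the conservative value
 * of 3 after logging an error. */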
static inline unsigned int r600_bc_get_num_operands(struct r600_bc_alu *alu)
{
	if (alu->is_op3)
		return 3;

	switch (alu->inst) {
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_NOP:
		return 0;
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_ADD:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_KILLE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_KILLGT:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_KILLGE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_KILLNE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MUL:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MAX:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MIN:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_SETE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_SETNE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_SETGT:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_SETGE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_PRED_SETE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_PRED_SETGT:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_PRED_SETGE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_PRED_SETNE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_DOT4:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_DOT4_IEEE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_CUBE:
		return 2;

	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MOV:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MOVA_FLOOR:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_FRACT:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_FLOOR:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_TRUNC:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_EXP_IEEE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_LOG_CLAMPED:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_LOG_IEEE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_RECIP_IEEE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_RECIPSQRT_IEEE:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_FLT_TO_INT:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_SIN:
	case V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_COS:
		return 1;
	default:
		R600_ERR("Need instruction operand number for 0x%x.\n", alu->inst);
	}

	return 3;
}

int r700_bc_alu_build(struct r600_bc *bc, struct r600_bc_alu *alu, unsigned id);

static struct r600_bc_cf *r600_bc_cf(void)
{
	struct r600_bc_cf *cf = CALLOC_STRUCT(r600_bc_cf);

	if (cf == NULL)
		return NULL;
	LIST_INITHEAD(&cf->list);
	LIST_INITHEAD(&cf->alu);
	LIST_INITHEAD(&cf->vtx);
	LIST_INITHEAD(&cf->tex);
	return cf;
}

static struct r600_bc_alu *r600_bc_alu(void)
{
	struct r600_bc_alu *alu = CALLOC_STRUCT(r600_bc_alu);

	if (alu == NULL)
		return NULL;
	LIST_INITHEAD(&alu->list);
	LIST_INITHEAD(&alu->bs_list);
	return alu;
}

static struct r600_bc_vtx *r600_bc_vtx(void)
{
	struct r600_bc_vtx *vtx = CALLOC_STRUCT(r600_bc_vtx);

	if (vtx == NULL)
		return NULL;
	LIST_INITHEAD(&vtx->list);
	return vtx;
}

static struct r600_bc_tex *r600_bc_tex(void)
{
	struct r600_bc_tex *tex = CALLOC_STRUCT(r600_bc_tex);

	if (tex == NULL)
		return NULL;
	LIST_INITHEAD(&tex->list);
	return tex;
}

int r600_bc_init(struct r600_bc *bc, enum radeon_family family)
{
	LIST_INITHEAD(&bc->cf);
	bc->family = family;
	switch (bc->family) {
	case CHIP_R600:
	case CHIP_RV610:
	case CHIP_RV630:
	case CHIP_RV670:
	case CHIP_RV620:
	case CHIP_RV635:
	case CHIP_RS780:
	case CHIP_RS880:
		bc->chiprev = CHIPREV_R600;
		break;
	case CHIP_RV770:
	case CHIP_RV730:
	case CHIP_RV710:
	case CHIP_RV740:
		bc->chiprev = CHIPREV_R700;
		break;
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
	case CHIP_JUNIPER:
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		bc->chiprev = CHIPREV_EVERGREEN;
		break;
	default:
		R600_ERR("unknown family %d\n", bc->family);
		return -EINVAL;
	}
	return 0;
}

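/* Append a new CF (control flow) instruction to the bytecode. Every CF
 * instruction is encoded in two dwords, so CF ids advance in steps of two
 * and bc->ndw grows by two per CF. */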
static int r600_bc_add_cf(struct r600_bc *bc)
{
	struct r600_bc_cf *cf = r600_bc_cf();

	if (cf == NULL)
		return -ENOMEM;
	LIST_ADDTAIL(&cf->list, &bc->cf);
	if (bc->cf_last)
		cf->id = bc->cf_last->id + 2;
	bc->cf_last = cf;
	bc->ncf++;
	bc->ndw += 2;
	bc->force_add_cf = 0;
	return 0;
}

int r600_bc_add_output(struct r600_bc *bc, const struct r600_bc_output *output)
{
	int r;

	r = r600_bc_add_cf(bc);
	if (r)
		return r;
	bc->cf_last->inst = output->inst;
	memcpy(&bc->cf_last->output, output, sizeof(struct r600_bc_output));
	return 0;
}

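/* Bank swizzle lookup tables. The index is a 3-bit key built in
 * check_scalar()/check_vector(): bit 2 is set when src0 is a constant,
 * bit 1 when src1 is, bit 0 when src2 is. The chosen swizzle controls in
 * which cycle each source operand is read, so constant reads do not
 * collide on the read ports. */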
const unsigned bank_swizzle_vec[8] = {SQ_ALU_VEC_210,  //000
				      SQ_ALU_VEC_120,  //001
				      SQ_ALU_VEC_102,  //010

				      SQ_ALU_VEC_201,  //011
				      SQ_ALU_VEC_012,  //100
				      SQ_ALU_VEC_021,  //101

				      SQ_ALU_VEC_012,  //110
				      SQ_ALU_VEC_012}; //111

const unsigned bank_swizzle_scl[8] = {SQ_ALU_SCL_210,  //000
				      SQ_ALU_SCL_122,  //001
				      SQ_ALU_SCL_122,  //010

				      SQ_ALU_SCL_221,  //011
				      SQ_ALU_SCL_212,  //100
				      SQ_ALU_SCL_122,  //101

				      SQ_ALU_SCL_122,  //110
				      SQ_ALU_SCL_122}; //111

static int init_gpr(struct r600_bc_alu *alu)
{
	int cycle, component;
	/* set up gpr use */
	for (cycle = 0; cycle < NUM_OF_CYCLES; cycle++)
		for (component = 0; component < NUM_OF_COMPONENTS; component++)
			alu->hw_gpr[cycle][component] = -1;
	return 0;
}

#if 0
static int reserve_gpr(struct r600_bc_alu *alu, unsigned sel, unsigned chan, unsigned cycle)
{
	if (alu->hw_gpr[cycle][chan] < 0)
		alu->hw_gpr[cycle][chan] = sel;
	else if (alu->hw_gpr[cycle][chan] != (int)sel) {
		R600_ERR("Another scalar operation has already used the GPR read port for this channel\n");
		return -1;
	}
	return 0;
}

static int cycle_for_scalar_bank_swizzle(const int swiz, const int sel, unsigned *p_cycle)
{
	int table[3];
	int ret = 0;
	switch (swiz) {
	case SQ_ALU_SCL_210:
		table[0] = 2; table[1] = 1; table[2] = 0;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_SCL_122:
		table[0] = 1; table[1] = 2; table[2] = 2;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_SCL_212:
		table[0] = 2; table[1] = 1; table[2] = 2;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_SCL_221:
		table[0] = 2; table[1] = 2; table[2] = 1;
		*p_cycle = table[sel];
		break;
	default:
		R600_ERR("bad scalar bank swizzle value\n");
		ret = -1;
		break;
	}
	return ret;
}

static int cycle_for_vector_bank_swizzle(const int swiz, const int sel, unsigned *p_cycle)
{
	int table[3];
	int ret = 0;

	switch (swiz) {
	case SQ_ALU_VEC_012:
		table[0] = 0; table[1] = 1; table[2] = 2;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_VEC_021:
		table[0] = 0; table[1] = 2; table[2] = 1;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_VEC_120:
		table[0] = 1; table[1] = 2; table[2] = 0;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_VEC_102:
		table[0] = 1; table[1] = 0; table[2] = 2;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_VEC_201:
		table[0] = 2; table[1] = 0; table[2] = 1;
		*p_cycle = table[sel];
		break;
	case SQ_ALU_VEC_210:
		table[0] = 2; table[1] = 1; table[2] = 0;
		*p_cycle = table[sel];
		break;
	default:
		R600_ERR("bad vector bank swizzle value\n");
		ret = -1;
		break;
	}
	return ret;
}


static void update_chan_counter(struct r600_bc_alu *alu, int *chan_counter)
{
	int num_src;
	int i;
	int channel_swizzle;

	num_src = r600_bc_get_num_operands(alu);

	for (i = 0; i < num_src; i++) {
		channel_swizzle = alu->src[i].chan;
		if ((alu->src[i].sel > 0 && alu->src[i].sel < 128) && channel_swizzle <= 3)
			chan_counter[channel_swizzle]++;
	}
}

/* we need something like this I think - but this is bogus */
int check_read_slots(struct r600_bc *bc, struct r600_bc_alu *alu_first)
{
	struct r600_bc_alu *alu;
	int chan_counter[4] = { 0 };

	update_chan_counter(alu_first, chan_counter);

	LIST_FOR_EACH_ENTRY(alu, &alu_first->bs_list, bs_list) {
		update_chan_counter(alu, chan_counter);
	}

	if (chan_counter[0] > 3 ||
	    chan_counter[1] > 3 ||
	    chan_counter[2] > 3 ||
	    chan_counter[3] > 3) {
		R600_ERR("needed to split instruction for input ran out of banks %x %d %d %d %d\n",
			 alu_first->inst, chan_counter[0], chan_counter[1], chan_counter[2], chan_counter[3]);
		return -1;
	}
	return 0;
}
#endif
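
/* A source select counts as a constant if it addresses the constant file
 * (sel 256-511) or one of the inline constants
 * (V_SQ_ALU_SRC_0 .. V_SQ_ALU_SRC_LITERAL). */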
static int is_const(int sel)
{
	if (sel > 255 && sel < 512)
		return 1;
	if (sel >= V_SQ_ALU_SRC_0 && sel <= V_SQ_ALU_SRC_LITERAL)
		return 1;
	return 0;
}

static int check_scalar(struct r600_bc *bc, struct r600_bc_alu *alu)
{
	unsigned swizzle_key;

	if (alu->bank_swizzle_force) {
		alu->bank_swizzle = alu->bank_swizzle_force;
		return 0;
	}
	swizzle_key = (is_const(alu->src[0].sel) ? 4 : 0) +
		      (is_const(alu->src[1].sel) ? 2 : 0) +
		      (is_const(alu->src[2].sel) ? 1 : 0);

	alu->bank_swizzle = bank_swizzle_scl[swizzle_key];
	return 0;
}

static int check_vector(struct r600_bc *bc, struct r600_bc_alu *alu)
{
	unsigned swizzle_key;

	if (alu->bank_swizzle_force) {
		alu->bank_swizzle = alu->bank_swizzle_force;
		return 0;
	}
	swizzle_key = (is_const(alu->src[0].sel) ? 4 : 0) +
		      (is_const(alu->src[1].sel) ? 2 : 0) +
		      (is_const(alu->src[2].sel) ? 1 : 0);

	alu->bank_swizzle = bank_swizzle_vec[swizzle_key];
	return 0;
}

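/* Walk the current instruction group, headed by curr_bs_head: a group of a
 * single instruction is treated as a scalar (trans-slot) operation and gets
 * a scalar swizzle, otherwise every instruction in the group gets a vector
 * swizzle. */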
static int check_and_set_bank_swizzle(struct r600_bc *bc, struct r600_bc_alu *alu_first)
{
	struct r600_bc_alu *alu = NULL;
	int num_instr = 1;

	init_gpr(alu_first);

	LIST_FOR_EACH_ENTRY(alu, &alu_first->bs_list, bs_list) {
		num_instr++;
	}

	if (num_instr == 1) {
		check_scalar(bc, alu_first);

	} else {
/*		check_read_slots(bc, bc->cf_last->curr_bs_head);*/
		check_vector(bc, alu_first);
		LIST_FOR_EACH_ENTRY(alu, &alu_first->bs_list, bs_list) {
			check_vector(bc, alu);
		}
	}
	return 0;
}

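/* Append an ALU instruction to the current ALU clause. CF ALU opcodes are
 * stored shifted left by 3 (r600_bc_cf_build() shifts them back) so they do
 * not collide with the plain CF opcodes in cf->inst; a new CF is started
 * whenever the last CF is of a different kind or force_add_cf is set.
 * Instructions up to the one with the "last" flag form a group, linked
 * through curr_bs_head for bank-swizzle assignment. */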
int r600_bc_add_alu_type(struct r600_bc *bc, const struct r600_bc_alu *alu, int type)
{
	struct r600_bc_alu *nalu = r600_bc_alu();
	struct r600_bc_alu *lalu;
	int i, r;

	if (nalu == NULL)
		return -ENOMEM;
	memcpy(nalu, alu, sizeof(struct r600_bc_alu));
	nalu->nliteral = 0;

	/* a cf can contain only alu, only vtx or only tex instructions */
	if (bc->cf_last == NULL || bc->cf_last->inst != (type << 3) ||
	    bc->force_add_cf) {
		r = r600_bc_add_cf(bc);
		if (r) {
			free(nalu);
			return r;
		}
		bc->cf_last->inst = (type << 3);
	}
	if (!bc->cf_last->curr_bs_head) {
		bc->cf_last->curr_bs_head = nalu;
		LIST_INITHEAD(&nalu->bs_list);
	} else {
		LIST_ADDTAIL(&nalu->bs_list, &bc->cf_last->curr_bs_head->bs_list);
	}
	/* at most 128 slots; in the worst case one alu add uses 4 slots
	 * plus 4 constants (2 slots) */
	if (alu->last && (bc->cf_last->ndw >> 1) >= 120) {
		bc->force_add_cf = 1;
	}
	/* number of gprs == the highest gpr used in any alu + 1 */
	for (i = 0; i < 3; i++) {
		if (alu->src[i].sel >= bc->ngpr && alu->src[i].sel < 128) {
			bc->ngpr = alu->src[i].sel + 1;
		}
		/* compute how many literals are needed:
		 * either 2 or 4 literal dwords
		 */
		if (alu->src[i].sel == 253) {
			if (((alu->src[i].chan + 2) & 0x6) > nalu->nliteral) {
				nalu->nliteral = (alu->src[i].chan + 2) & 0x6;
			}
		}
	}
	if (!LIST_IS_EMPTY(&bc->cf_last->alu)) {
		lalu = LIST_ENTRY(struct r600_bc_alu, bc->cf_last->alu.prev, list);
		if (!lalu->last && lalu->nliteral > nalu->nliteral) {
			nalu->nliteral = lalu->nliteral;
		}
	}
	if (alu->dst.sel >= bc->ngpr) {
		bc->ngpr = alu->dst.sel + 1;
	}
	LIST_ADDTAIL(&nalu->list, &bc->cf_last->alu);
	/* each alu uses 2 dwords */
	bc->cf_last->ndw += 2;
	bc->ndw += 2;

	bc->cf_last->kcache0_mode = 2;

	/* process the current ALU instruction group for bank swizzle */
	if (alu->last) {
		check_and_set_bank_swizzle(bc, bc->cf_last->curr_bs_head);
		bc->cf_last->curr_bs_head = NULL;
	}
	return 0;
}

int r600_bc_add_alu(struct r600_bc *bc, const struct r600_bc_alu *alu)
{
	return r600_bc_add_alu_type(bc, alu, BC_INST(bc, V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU));
}

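/* Attach the literal constants for the last ALU instruction group. For a
 * TEX clause or a flow-control CF this is a no-op; nliteral was computed in
 * r600_bc_add_alu_type() and is either 2 or 4 dwords, emitted right after
 * the group's last instruction when the bytecode is built. */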
int r600_bc_add_literal(struct r600_bc *bc, const u32 *value)
{
	struct r600_bc_alu *alu;

	if (bc->cf_last == NULL) {
		return 0;
	}
	if (bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_TEX) {
		return 0;
	}
	/* all same on EG */
	if (bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_JUMP ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_ELSE ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_LOOP_START_NO_AL ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_LOOP_BREAK ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_LOOP_CONTINUE ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_LOOP_END ||
	    bc->cf_last->inst == V_SQ_CF_WORD1_SQ_CF_INST_POP) {
		return 0;
	}
	/* same on EG */
	if (((bc->cf_last->inst != (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU << 3)) &&
	     (bc->cf_last->inst != (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU_PUSH_BEFORE << 3))) ||
	    LIST_IS_EMPTY(&bc->cf_last->alu)) {
		R600_ERR("last CF is not ALU (%p)\n", bc->cf_last);
		return -EINVAL;
	}
	alu = LIST_ENTRY(struct r600_bc_alu, bc->cf_last->alu.prev, list);
	if (!alu->last || !alu->nliteral || alu->literal_added) {
		return 0;
	}
	memcpy(alu->value, value, 4 * 4);
	bc->cf_last->ndw += alu->nliteral;
	bc->ndw += alu->nliteral;
	alu->literal_added = 1;
	return 0;
}

int r600_bc_add_vtx(struct r600_bc *bc, const struct r600_bc_vtx *vtx)
{
	struct r600_bc_vtx *nvtx = r600_bc_vtx();
	int r;

	if (nvtx == NULL)
		return -ENOMEM;
	memcpy(nvtx, vtx, sizeof(struct r600_bc_vtx));

	/* a cf can contain only alu, only vtx or only tex instructions */
	if (bc->cf_last == NULL ||
	    (bc->cf_last->inst != V_SQ_CF_WORD1_SQ_CF_INST_VTX &&
	     bc->cf_last->inst != V_SQ_CF_WORD1_SQ_CF_INST_VTX_TC) ||
	    bc->force_add_cf) {
		r = r600_bc_add_cf(bc);
		if (r) {
			free(nvtx);
			return r;
		}
		bc->cf_last->inst = V_SQ_CF_WORD1_SQ_CF_INST_VTX;
	}
	LIST_ADDTAIL(&nvtx->list, &bc->cf_last->vtx);
	/* each fetch uses 4 dwords */
	bc->cf_last->ndw += 4;
	bc->ndw += 4;
	if ((bc->ndw / 4) > 7)
		bc->force_add_cf = 1;
	return 0;
}

int r600_bc_add_tex(struct r600_bc *bc, const struct r600_bc_tex *tex)
{
	struct r600_bc_tex *ntex = r600_bc_tex();
	int r;

	if (ntex == NULL)
		return -ENOMEM;
	memcpy(ntex, tex, sizeof(struct r600_bc_tex));

	/* a cf can contain only alu, only vtx or only tex instructions */
	if (bc->cf_last == NULL ||
	    bc->cf_last->inst != V_SQ_CF_WORD1_SQ_CF_INST_TEX ||
	    bc->force_add_cf) {
		r = r600_bc_add_cf(bc);
		if (r) {
			free(ntex);
			return r;
		}
		bc->cf_last->inst = V_SQ_CF_WORD1_SQ_CF_INST_TEX;
	}
	LIST_ADDTAIL(&ntex->list, &bc->cf_last->tex);
	/* each texture fetch uses 4 dwords */
	bc->cf_last->ndw += 4;
	bc->ndw += 4;
	if ((bc->ndw / 4) > 7)
		bc->force_add_cf = 1;
	return 0;
}

int r600_bc_add_cfinst(struct r600_bc *bc, int inst)
{
	int r;
	r = r600_bc_add_cf(bc);
	if (r)
		return r;

	bc->cf_last->cond = V_SQ_CF_COND_ACTIVE;
	bc->cf_last->inst = inst;
	return 0;
}

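/* Each vertex fetch is encoded into three dwords (VTX_WORD0/1/2) plus one
 * padding dword, matching the 4 dwords reserved per fetch in
 * r600_bc_add_vtx(). */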
/* common to all 3 families */
static int r600_bc_vtx_build(struct r600_bc *bc, struct r600_bc_vtx *vtx, unsigned id)
{
	unsigned fetch_resource_start = 0;

	/* check if we are a fetch shader */
	/* a fetch shader can also access vertex resources;
	 * the first fetch shader resource is at 160
	 */
	if (bc->type == -1) {
		switch (bc->chiprev) {
		/* r600 */
		case CHIPREV_R600:
		/* r700 */
		case CHIPREV_R700:
			fetch_resource_start = 160;
			break;
		/* evergreen */
		case CHIPREV_EVERGREEN:
			fetch_resource_start = 0;
			break;
		default:
			fprintf(stderr, "%s:%s:%d unknown chiprev %d\n",
				__FILE__, __func__, __LINE__, bc->chiprev);
			break;
		}
	}
	bc->bytecode[id++] = S_SQ_VTX_WORD0_BUFFER_ID(vtx->buffer_id + fetch_resource_start) |
			     S_SQ_VTX_WORD0_SRC_GPR(vtx->src_gpr) |
			     S_SQ_VTX_WORD0_SRC_SEL_X(vtx->src_sel_x) |
			     S_SQ_VTX_WORD0_MEGA_FETCH_COUNT(vtx->mega_fetch_count);
	bc->bytecode[id++] = S_SQ_VTX_WORD1_DST_SEL_X(vtx->dst_sel_x) |
			     S_SQ_VTX_WORD1_DST_SEL_Y(vtx->dst_sel_y) |
			     S_SQ_VTX_WORD1_DST_SEL_Z(vtx->dst_sel_z) |
			     S_SQ_VTX_WORD1_DST_SEL_W(vtx->dst_sel_w) |
			     S_SQ_VTX_WORD1_USE_CONST_FIELDS(vtx->use_const_fields) |
			     S_SQ_VTX_WORD1_DATA_FORMAT(vtx->data_format) |
			     S_SQ_VTX_WORD1_NUM_FORMAT_ALL(vtx->num_format_all) |
			     S_SQ_VTX_WORD1_FORMAT_COMP_ALL(vtx->format_comp_all) |
			     S_SQ_VTX_WORD1_SRF_MODE_ALL(vtx->srf_mode_all) |
			     S_SQ_VTX_WORD1_GPR_DST_GPR(vtx->dst_gpr);
	bc->bytecode[id++] = S_SQ_VTX_WORD2_MEGA_FETCH(1);
	bc->bytecode[id++] = 0;
	return 0;
}

/* common to all 3 families */
static int r600_bc_tex_build(struct r600_bc *bc, struct r600_bc_tex *tex, unsigned id)
{
	bc->bytecode[id++] = S_SQ_TEX_WORD0_TEX_INST(tex->inst) |
			     S_SQ_TEX_WORD0_RESOURCE_ID(tex->resource_id) |
			     S_SQ_TEX_WORD0_SRC_GPR(tex->src_gpr) |
			     S_SQ_TEX_WORD0_SRC_REL(tex->src_rel);
	bc->bytecode[id++] = S_SQ_TEX_WORD1_DST_GPR(tex->dst_gpr) |
			     S_SQ_TEX_WORD1_DST_REL(tex->dst_rel) |
			     S_SQ_TEX_WORD1_DST_SEL_X(tex->dst_sel_x) |
			     S_SQ_TEX_WORD1_DST_SEL_Y(tex->dst_sel_y) |
			     S_SQ_TEX_WORD1_DST_SEL_Z(tex->dst_sel_z) |
			     S_SQ_TEX_WORD1_DST_SEL_W(tex->dst_sel_w) |
			     S_SQ_TEX_WORD1_LOD_BIAS(tex->lod_bias) |
			     S_SQ_TEX_WORD1_COORD_TYPE_X(tex->coord_type_x) |
			     S_SQ_TEX_WORD1_COORD_TYPE_Y(tex->coord_type_y) |
			     S_SQ_TEX_WORD1_COORD_TYPE_Z(tex->coord_type_z) |
			     S_SQ_TEX_WORD1_COORD_TYPE_W(tex->coord_type_w);
	bc->bytecode[id++] = S_SQ_TEX_WORD2_OFFSET_X(tex->offset_x) |
			     S_SQ_TEX_WORD2_OFFSET_Y(tex->offset_y) |
			     S_SQ_TEX_WORD2_OFFSET_Z(tex->offset_z) |
			     S_SQ_TEX_WORD2_SAMPLER_ID(tex->sampler_id) |
			     S_SQ_TEX_WORD2_SRC_SEL_X(tex->src_sel_x) |
			     S_SQ_TEX_WORD2_SRC_SEL_Y(tex->src_sel_y) |
			     S_SQ_TEX_WORD2_SRC_SEL_Z(tex->src_sel_z) |
			     S_SQ_TEX_WORD2_SRC_SEL_W(tex->src_sel_w);
	bc->bytecode[id++] = 0;
	return 0;
}

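/* Each ALU instruction is encoded into two dwords: WORD0 carries the first
 * two source operands plus the "last in group" bit, WORD1 is either the OP2
 * or the OP3 form depending on alu->is_op3. Literal dwords, if any, follow
 * the last instruction of a group. */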
/* r600 only, r700/eg bits in r700_asm.c */
static int r600_bc_alu_build(struct r600_bc *bc, struct r600_bc_alu *alu, unsigned id)
{
	unsigned i;

	/* don't replace the destination gpr with pv or ps */
	bc->bytecode[id++] = S_SQ_ALU_WORD0_SRC0_SEL(alu->src[0].sel) |
			     S_SQ_ALU_WORD0_SRC0_REL(alu->src[0].rel) |
			     S_SQ_ALU_WORD0_SRC0_CHAN(alu->src[0].chan) |
			     S_SQ_ALU_WORD0_SRC0_NEG(alu->src[0].neg) |
			     S_SQ_ALU_WORD0_SRC1_SEL(alu->src[1].sel) |
			     S_SQ_ALU_WORD0_SRC1_REL(alu->src[1].rel) |
			     S_SQ_ALU_WORD0_SRC1_CHAN(alu->src[1].chan) |
			     S_SQ_ALU_WORD0_SRC1_NEG(alu->src[1].neg) |
			     S_SQ_ALU_WORD0_LAST(alu->last);

	if (alu->is_op3) {
		bc->bytecode[id++] = S_SQ_ALU_WORD1_DST_GPR(alu->dst.sel) |
				     S_SQ_ALU_WORD1_DST_CHAN(alu->dst.chan) |
				     S_SQ_ALU_WORD1_DST_REL(alu->dst.rel) |
				     S_SQ_ALU_WORD1_CLAMP(alu->dst.clamp) |
				     S_SQ_ALU_WORD1_OP3_SRC2_SEL(alu->src[2].sel) |
				     S_SQ_ALU_WORD1_OP3_SRC2_REL(alu->src[2].rel) |
				     S_SQ_ALU_WORD1_OP3_SRC2_CHAN(alu->src[2].chan) |
				     S_SQ_ALU_WORD1_OP3_SRC2_NEG(alu->src[2].neg) |
				     S_SQ_ALU_WORD1_OP3_ALU_INST(alu->inst) |
				     S_SQ_ALU_WORD1_BANK_SWIZZLE(alu->bank_swizzle);
	} else {
		bc->bytecode[id++] = S_SQ_ALU_WORD1_DST_GPR(alu->dst.sel) |
				     S_SQ_ALU_WORD1_DST_CHAN(alu->dst.chan) |
				     S_SQ_ALU_WORD1_DST_REL(alu->dst.rel) |
				     S_SQ_ALU_WORD1_CLAMP(alu->dst.clamp) |
				     S_SQ_ALU_WORD1_OP2_SRC0_ABS(alu->src[0].abs) |
				     S_SQ_ALU_WORD1_OP2_SRC1_ABS(alu->src[1].abs) |
				     S_SQ_ALU_WORD1_OP2_WRITE_MASK(alu->dst.write) |
				     S_SQ_ALU_WORD1_OP2_ALU_INST(alu->inst) |
				     S_SQ_ALU_WORD1_BANK_SWIZZLE(alu->bank_swizzle) |
				     S_SQ_ALU_WORD1_OP2_UPDATE_EXECUTE_MASK(alu->predicate) |
				     S_SQ_ALU_WORD1_OP2_UPDATE_PRED(alu->predicate);
	}
	if (alu->last) {
		if (alu->nliteral && !alu->literal_added) {
			R600_ERR("Bug in ALU processing for instruction 0x%08x, literal not added correctly\n", alu->inst);
		}
		for (i = 0; i < alu->nliteral; i++) {
			bc->bytecode[id++] = alu->value[i];
		}
	}
	return 0;
}

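/* Encode one CF instruction into its two dwords. Clause addresses are given
 * in 2-dword units; the COUNT fields are in instruction slots minus one
 * (2 dwords per ALU instruction, 4 per fetch). Exports carry their own word
 * layout, and flow-control CFs use cf_addr, cond and pop_count. */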
/* common for r600/r700 - eg in eg_asm.c */
static int r600_bc_cf_build(struct r600_bc *bc, struct r600_bc_cf *cf)
{
	unsigned id = cf->id;

	switch (cf->inst) {
	case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU << 3):
	case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU_PUSH_BEFORE << 3):
		bc->bytecode[id++] = S_SQ_CF_ALU_WORD0_ADDR(cf->addr >> 1) |
				     S_SQ_CF_ALU_WORD0_KCACHE_MODE0(cf->kcache0_mode) |
				     S_SQ_CF_ALU_WORD0_KCACHE_BANK0(cf->kcache0_bank) |
				     S_SQ_CF_ALU_WORD0_KCACHE_BANK1(cf->kcache1_bank);

		bc->bytecode[id++] = S_SQ_CF_ALU_WORD1_CF_INST(cf->inst >> 3) |
				     S_SQ_CF_ALU_WORD1_KCACHE_MODE1(cf->kcache1_mode) |
				     S_SQ_CF_ALU_WORD1_KCACHE_ADDR0(cf->kcache0_addr) |
				     S_SQ_CF_ALU_WORD1_KCACHE_ADDR1(cf->kcache1_addr) |
				     S_SQ_CF_ALU_WORD1_BARRIER(1) |
				     S_SQ_CF_ALU_WORD1_USES_WATERFALL(bc->chiprev == CHIPREV_R600 ? cf->r6xx_uses_waterfall : 0) |
				     S_SQ_CF_ALU_WORD1_COUNT((cf->ndw / 2) - 1);
		break;
	case V_SQ_CF_WORD1_SQ_CF_INST_TEX:
	case V_SQ_CF_WORD1_SQ_CF_INST_VTX:
	case V_SQ_CF_WORD1_SQ_CF_INST_VTX_TC:
		bc->bytecode[id++] = S_SQ_CF_WORD0_ADDR(cf->addr >> 1);
		bc->bytecode[id++] = S_SQ_CF_WORD1_CF_INST(cf->inst) |
				     S_SQ_CF_WORD1_BARRIER(1) |
				     S_SQ_CF_WORD1_COUNT((cf->ndw / 4) - 1);
		break;
	case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT:
	case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT_DONE:
		bc->bytecode[id++] = S_SQ_CF_ALLOC_EXPORT_WORD0_RW_GPR(cf->output.gpr) |
				     S_SQ_CF_ALLOC_EXPORT_WORD0_ELEM_SIZE(cf->output.elem_size) |
				     S_SQ_CF_ALLOC_EXPORT_WORD0_ARRAY_BASE(cf->output.array_base) |
				     S_SQ_CF_ALLOC_EXPORT_WORD0_TYPE(cf->output.type);
		bc->bytecode[id++] = S_SQ_CF_ALLOC_EXPORT_WORD1_SWIZ_SEL_X(cf->output.swizzle_x) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_SWIZ_SEL_Y(cf->output.swizzle_y) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_SWIZ_SEL_Z(cf->output.swizzle_z) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_SWIZ_SEL_W(cf->output.swizzle_w) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_BARRIER(cf->output.barrier) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_CF_INST(cf->output.inst) |
				     S_SQ_CF_ALLOC_EXPORT_WORD1_END_OF_PROGRAM(cf->output.end_of_program);
		break;
	case V_SQ_CF_WORD1_SQ_CF_INST_JUMP:
	case V_SQ_CF_WORD1_SQ_CF_INST_ELSE:
	case V_SQ_CF_WORD1_SQ_CF_INST_POP:
	case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_START_NO_AL:
	case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_END:
	case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_CONTINUE:
	case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_BREAK:
	case V_SQ_CF_WORD1_SQ_CF_INST_CALL_FS:
	case V_SQ_CF_WORD1_SQ_CF_INST_RETURN:
		bc->bytecode[id++] = S_SQ_CF_WORD0_ADDR(cf->cf_addr >> 1);
		bc->bytecode[id++] = S_SQ_CF_WORD1_CF_INST(cf->inst) |
				     S_SQ_CF_WORD1_BARRIER(1) |
				     S_SQ_CF_WORD1_COND(cf->cond) |
				     S_SQ_CF_WORD1_POP_COUNT(cf->pop_count);

		break;
	default:
		R600_ERR("unsupported CF instruction (0x%X)\n", cf->inst);
		return -EINVAL;
	}
	return 0;
}

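/* Build the final bytecode in two passes: the first pass assigns each CF
 * block an address after all the CF instructions (fetch clauses rounded up
 * to a 4-dword boundary), the second pass encodes every CF and the clause
 * contents behind it into bc->bytecode. */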
int r600_bc_build(struct r600_bc *bc)
{
	struct r600_bc_cf *cf;
	struct r600_bc_alu *alu;
	struct r600_bc_vtx *vtx;
	struct r600_bc_tex *tex;
	unsigned addr;
	int r;

	if (bc->callstack[0].max > 0)
		bc->nstack = ((bc->callstack[0].max + 3) >> 2) + 2;
	if (bc->type == TGSI_PROCESSOR_VERTEX && !bc->nstack) {
		bc->nstack = 1;
	}

	/* first pass: compute the addr of each CF block */
	/* addresses start after all the CF instructions */
	addr = bc->cf_last->id + 2;
	LIST_FOR_EACH_ENTRY(cf, &bc->cf, list) {
		switch (cf->inst) {
		case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU << 3):
		case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU_PUSH_BEFORE << 3):
			break;
		case V_SQ_CF_WORD1_SQ_CF_INST_TEX:
		case V_SQ_CF_WORD1_SQ_CF_INST_VTX:
		case V_SQ_CF_WORD1_SQ_CF_INST_VTX_TC:
			/* fetch clauses need to be 16-byte (4 dword) aligned */
			addr += 3;
			addr &= 0xFFFFFFFCUL;
			break;
		case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT:
		case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT_DONE:
		case EG_V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT:
		case EG_V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT_DONE:
			break;
		case V_SQ_CF_WORD1_SQ_CF_INST_JUMP:
		case V_SQ_CF_WORD1_SQ_CF_INST_ELSE:
		case V_SQ_CF_WORD1_SQ_CF_INST_POP:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_START_NO_AL:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_END:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_CONTINUE:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_BREAK:
		case V_SQ_CF_WORD1_SQ_CF_INST_CALL_FS:
		case V_SQ_CF_WORD1_SQ_CF_INST_RETURN:
			break;
		default:
			R600_ERR("unsupported CF instruction (0x%X)\n", cf->inst);
			return -EINVAL;
		}
		cf->addr = addr;
		addr += cf->ndw;
		bc->ndw = cf->addr + cf->ndw;
	}
	free(bc->bytecode);
	bc->bytecode = calloc(1, bc->ndw * 4);
	if (bc->bytecode == NULL)
		return -ENOMEM;
	LIST_FOR_EACH_ENTRY(cf, &bc->cf, list) {
		addr = cf->addr;
		if (bc->chiprev == CHIPREV_EVERGREEN)
			r = eg_bc_cf_build(bc, cf);
		else
			r = r600_bc_cf_build(bc, cf);
		if (r)
			return r;
		switch (cf->inst) {
		case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU << 3):
		case (V_SQ_CF_ALU_WORD1_SQ_CF_INST_ALU_PUSH_BEFORE << 3):
			LIST_FOR_EACH_ENTRY(alu, &cf->alu, list) {
				switch (bc->chiprev) {
				case CHIPREV_R600:
					r = r600_bc_alu_build(bc, alu, addr);
					break;
				case CHIPREV_R700:
				case CHIPREV_EVERGREEN: /* eg alu is the same encoding as r700 */
					r = r700_bc_alu_build(bc, alu, addr);
					break;
				default:
					R600_ERR("unknown family %d\n", bc->family);
					return -EINVAL;
				}
				if (r)
					return r;
				addr += 2;
				if (alu->last) {
					addr += alu->nliteral;
				}
			}
			break;
		case V_SQ_CF_WORD1_SQ_CF_INST_VTX:
		case V_SQ_CF_WORD1_SQ_CF_INST_VTX_TC:
			LIST_FOR_EACH_ENTRY(vtx, &cf->vtx, list) {
				r = r600_bc_vtx_build(bc, vtx, addr);
				if (r)
					return r;
				addr += 4;
			}
			break;
		case V_SQ_CF_WORD1_SQ_CF_INST_TEX:
			LIST_FOR_EACH_ENTRY(tex, &cf->tex, list) {
				r = r600_bc_tex_build(bc, tex, addr);
				if (r)
					return r;
				addr += 4;
			}
			break;
		case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT:
		case V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT_DONE:
		case EG_V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT:
		case EG_V_SQ_CF_ALLOC_EXPORT_WORD1_SQ_CF_INST_EXPORT_DONE:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_START_NO_AL:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_END:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_CONTINUE:
		case V_SQ_CF_WORD1_SQ_CF_INST_LOOP_BREAK:
		case V_SQ_CF_WORD1_SQ_CF_INST_JUMP:
		case V_SQ_CF_WORD1_SQ_CF_INST_ELSE:
		case V_SQ_CF_WORD1_SQ_CF_INST_POP:
		case V_SQ_CF_WORD1_SQ_CF_INST_CALL_FS:
		case V_SQ_CF_WORD1_SQ_CF_INST_RETURN:
			break;
		default:
			R600_ERR("unsupported CF instruction (0x%X)\n", cf->inst);
			return -EINVAL;
		}
	}
	return 0;
}

void r600_bc_clear(struct r600_bc *bc)
{
	struct r600_bc_cf *cf = NULL, *next_cf;

	free(bc->bytecode);
	bc->bytecode = NULL;

	LIST_FOR_EACH_ENTRY_SAFE(cf, next_cf, &bc->cf, list) {
		struct r600_bc_alu *alu = NULL, *next_alu;
		struct r600_bc_tex *tex = NULL, *next_tex;
		struct r600_bc_vtx *vtx = NULL, *next_vtx;

		LIST_FOR_EACH_ENTRY_SAFE(alu, next_alu, &cf->alu, list) {
			free(alu);
		}

		LIST_INITHEAD(&cf->alu);

		LIST_FOR_EACH_ENTRY_SAFE(tex, next_tex, &cf->tex, list) {
			free(tex);
		}

		LIST_INITHEAD(&cf->tex);

		LIST_FOR_EACH_ENTRY_SAFE(vtx, next_vtx, &cf->vtx, list) {
			free(vtx);
		}

		LIST_INITHEAD(&cf->vtx);

		free(cf);
	}

	LIST_INITHEAD(&bc->cf);
}
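
/* Illustrative usage sketch only (not part of the driver): a caller such as
 * the TGSI translation code drives this module roughly as follows; the
 * field values below are made up for the example.
 *
 *	struct r600_bc bc;
 *	struct r600_bc_alu alu;
 *	u32 literal[4] = { 0 };
 *
 *	memset(&bc, 0, sizeof(bc));
 *	r600_bc_init(&bc, CHIP_RV770);
 *
 *	memset(&alu, 0, sizeof(alu));
 *	alu.inst = V_SQ_ALU_WORD1_OP2_SQ_OP2_INST_MOV;
 *	alu.src[0].sel = 0;		// source: GPR0
 *	alu.dst.sel = 1;		// destination: GPR1
 *	alu.dst.write = 1;
 *	alu.last = 1;			// closes the instruction group
 *	r600_bc_add_alu(&bc, &alu);
 *	r600_bc_add_literal(&bc, literal);
 *
 *	if (r600_bc_build(&bc) == 0) {
 *		// bc.bytecode now holds bc.ndw dwords of shader code
 *	}
 *	r600_bc_clear(&bc);
 */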