gallium/auxiliary: Trivial code style cleanup
[mesa.git] / src / gallium / auxiliary / tgsi / tgsi_ureg.c
1 /**************************************************************************
2 *
3 * Copyright 2009-2010 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
42
43 union tgsi_any_token {
44 struct tgsi_header header;
45 struct tgsi_processor processor;
46 struct tgsi_token token;
47 struct tgsi_property prop;
48 struct tgsi_property_data prop_data;
49 struct tgsi_declaration decl;
50 struct tgsi_declaration_range decl_range;
51 struct tgsi_declaration_dimension decl_dim;
52 struct tgsi_declaration_interp decl_interp;
53 struct tgsi_declaration_semantic decl_semantic;
54 struct tgsi_declaration_sampler_view decl_sampler_view;
55 struct tgsi_declaration_array array;
56 struct tgsi_immediate imm;
57 union tgsi_immediate_data imm_data;
58 struct tgsi_instruction insn;
59 struct tgsi_instruction_predicate insn_predicate;
60 struct tgsi_instruction_label insn_label;
61 struct tgsi_instruction_texture insn_texture;
62 struct tgsi_texture_offset insn_texture_offset;
63 struct tgsi_src_register src;
64 struct tgsi_ind_register ind;
65 struct tgsi_dimension dim;
66 struct tgsi_dst_register dst;
67 unsigned value;
68 };
69
70
71 struct ureg_tokens {
72 union tgsi_any_token *tokens;
73 unsigned size;
74 unsigned order;
75 unsigned count;
76 };
77
78 #define UREG_MAX_INPUT PIPE_MAX_SHADER_INPUTS
79 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
80 #define UREG_MAX_OUTPUT PIPE_MAX_SHADER_OUTPUTS
81 #define UREG_MAX_CONSTANT_RANGE 32
82 #define UREG_MAX_IMMEDIATE 4096
83 #define UREG_MAX_ADDR 3
84 #define UREG_MAX_PRED 1
85 #define UREG_MAX_ARRAY_TEMPS 256
86
87 struct const_decl {
88 struct {
89 unsigned first;
90 unsigned last;
91 } constant_range[UREG_MAX_CONSTANT_RANGE];
92 unsigned nr_constant_ranges;
93 };
94
95 #define DOMAIN_DECL 0
96 #define DOMAIN_INSN 1
97
98 struct ureg_program
99 {
100 unsigned processor;
101 bool supports_any_inout_decl_range;
102
103 struct {
104 unsigned semantic_name;
105 unsigned semantic_index;
106 unsigned interp;
107 unsigned char cylindrical_wrap;
108 unsigned interp_location;
109 unsigned first;
110 unsigned last;
111 unsigned array_id;
112 } input[UREG_MAX_INPUT];
113 unsigned nr_inputs, nr_input_regs;
114
115 unsigned vs_inputs[PIPE_MAX_ATTRIBS/32];
116
117 struct {
118 unsigned index;
119 unsigned semantic_name;
120 unsigned semantic_index;
121 } system_value[UREG_MAX_SYSTEM_VALUE];
122 unsigned nr_system_values;
123
124 struct {
125 unsigned semantic_name;
126 unsigned semantic_index;
127 unsigned usage_mask; /* = TGSI_WRITEMASK_* */
128 unsigned first;
129 unsigned last;
130 unsigned array_id;
131 } output[UREG_MAX_OUTPUT];
132 unsigned nr_outputs, nr_output_regs;
133
134 struct {
135 union {
136 float f[4];
137 unsigned u[4];
138 int i[4];
139 } value;
140 unsigned nr;
141 unsigned type;
142 } immediate[UREG_MAX_IMMEDIATE];
143 unsigned nr_immediates;
144
145 struct ureg_src sampler[PIPE_MAX_SAMPLERS];
146 unsigned nr_samplers;
147
148 struct {
149 unsigned index;
150 unsigned target;
151 unsigned return_type_x;
152 unsigned return_type_y;
153 unsigned return_type_z;
154 unsigned return_type_w;
155 } sampler_view[PIPE_MAX_SHADER_SAMPLER_VIEWS];
156 unsigned nr_sampler_views;
157
158 struct util_bitmask *free_temps;
159 struct util_bitmask *local_temps;
160 struct util_bitmask *decl_temps;
161 unsigned nr_temps;
162
163 unsigned array_temps[UREG_MAX_ARRAY_TEMPS];
164 unsigned nr_array_temps;
165
166 struct const_decl const_decls;
167 struct const_decl const_decls2D[PIPE_MAX_CONSTANT_BUFFERS];
168
169 unsigned properties[TGSI_PROPERTY_COUNT];
170
171 unsigned nr_addrs;
172 unsigned nr_preds;
173 unsigned nr_instructions;
174
175 struct ureg_tokens domain[2];
176 };
177
178 static union tgsi_any_token error_tokens[32];
179
180 static void tokens_error( struct ureg_tokens *tokens )
181 {
182 if (tokens->tokens && tokens->tokens != error_tokens)
183 FREE(tokens->tokens);
184
185 tokens->tokens = error_tokens;
186 tokens->size = Elements(error_tokens);
187 tokens->count = 0;
188 }
189
190
191 static void tokens_expand( struct ureg_tokens *tokens,
192 unsigned count )
193 {
194 unsigned old_size = tokens->size * sizeof(unsigned);
195
196 if (tokens->tokens == error_tokens) {
197 return;
198 }
199
200 while (tokens->count + count > tokens->size) {
201 tokens->size = (1 << ++tokens->order);
202 }
203
204 tokens->tokens = REALLOC(tokens->tokens,
205 old_size,
206 tokens->size * sizeof(unsigned));
207 if (tokens->tokens == NULL) {
208 tokens_error(tokens);
209 }
210 }
211
212 static void set_bad( struct ureg_program *ureg )
213 {
214 tokens_error(&ureg->domain[0]);
215 }
216
217
218
219 static union tgsi_any_token *get_tokens( struct ureg_program *ureg,
220 unsigned domain,
221 unsigned count )
222 {
223 struct ureg_tokens *tokens = &ureg->domain[domain];
224 union tgsi_any_token *result;
225
226 if (tokens->count + count > tokens->size)
227 tokens_expand(tokens, count);
228
229 result = &tokens->tokens[tokens->count];
230 tokens->count += count;
231 return result;
232 }
233
234
235 static union tgsi_any_token *retrieve_token( struct ureg_program *ureg,
236 unsigned domain,
237 unsigned nr )
238 {
239 if (ureg->domain[domain].tokens == error_tokens)
240 return &error_tokens[0];
241
242 return &ureg->domain[domain].tokens[nr];
243 }
244
245 void
246 ureg_property(struct ureg_program *ureg, unsigned name, unsigned value)
247 {
248 assert(name < Elements(ureg->properties));
249 ureg->properties[name] = value;
250 }
251
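/* Usage sketch (illustrative, not part of the original file): setting a
 * shader property before any declarations or instructions are emitted.
 * The property and value enums come from p_shader_tokens.h.
 *
 *    ureg_property(ureg, TGSI_PROPERTY_FS_COORD_ORIGIN,
 *                  TGSI_FS_COORD_ORIGIN_UPPER_LEFT);
 *
 * Properties default to ~0 (unset) and are only written out by emit_decls()
 * once they have been given a value.
 */
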
252 struct ureg_src
253 ureg_DECL_fs_input_cyl_centroid(struct ureg_program *ureg,
254 unsigned semantic_name,
255 unsigned semantic_index,
256 unsigned interp_mode,
257 unsigned cylindrical_wrap,
258 unsigned interp_location,
259 unsigned array_id,
260 unsigned array_size)
261 {
262 unsigned i;
263
264 for (i = 0; i < ureg->nr_inputs; i++) {
265 if (ureg->input[i].semantic_name == semantic_name &&
266 ureg->input[i].semantic_index == semantic_index) {
267 assert(ureg->input[i].interp == interp_mode);
268 assert(ureg->input[i].cylindrical_wrap == cylindrical_wrap);
269 assert(ureg->input[i].interp_location == interp_location);
270 assert(ureg->input[i].array_id == array_id);
271 goto out;
272 }
273 }
274
275 if (ureg->nr_inputs < UREG_MAX_INPUT) {
276 assert(array_size >= 1);
277 ureg->input[i].semantic_name = semantic_name;
278 ureg->input[i].semantic_index = semantic_index;
279 ureg->input[i].interp = interp_mode;
280 ureg->input[i].cylindrical_wrap = cylindrical_wrap;
281 ureg->input[i].interp_location = interp_location;
282 ureg->input[i].first = ureg->nr_input_regs;
283 ureg->input[i].last = ureg->nr_input_regs + array_size - 1;
284 ureg->input[i].array_id = array_id;
285 ureg->nr_input_regs += array_size;
286 ureg->nr_inputs++;
287 } else {
288 set_bad(ureg);
289 }
290
291 out:
292 return ureg_src_array_register(TGSI_FILE_INPUT, ureg->input[i].first,
293 array_id);
294 }
295
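/* Usage sketch (illustrative, not part of the original file): declaring a
 * perspective-interpolated color input in a fragment shader.  The last two
 * arguments (array_id, array_size) are 0 and 1 for a plain, non-array input;
 * TGSI_INTERPOLATE_LOC_CENTER is assumed to be the default interpolation
 * location enum from p_shader_tokens.h.
 *
 *    struct ureg_src color =
 *       ureg_DECL_fs_input_cyl_centroid(ureg,
 *                                       TGSI_SEMANTIC_COLOR, 0,
 *                                       TGSI_INTERPOLATE_PERSPECTIVE,
 *                                       0,
 *                                       TGSI_INTERPOLATE_LOC_CENTER,
 *                                       0, 1);
 *
 * Calling this again with the same semantic name/index returns the same
 * register instead of declaring a second input.
 */
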
296
297 struct ureg_src
298 ureg_DECL_vs_input( struct ureg_program *ureg,
299 unsigned index )
300 {
301 assert(ureg->processor == TGSI_PROCESSOR_VERTEX);
302 assert(index / 32 < ARRAY_SIZE(ureg->vs_inputs));
303
304 ureg->vs_inputs[index/32] |= 1 << (index % 32);
305 return ureg_src_register( TGSI_FILE_INPUT, index );
306 }
307
308
309 struct ureg_src
310 ureg_DECL_input(struct ureg_program *ureg,
311 unsigned semantic_name,
312 unsigned semantic_index,
313 unsigned array_id,
314 unsigned array_size)
315 {
316 return ureg_DECL_fs_input_cyl_centroid(ureg, semantic_name, semantic_index,
317 0, 0, 0, array_id, array_size);
318 }
319
320
321 struct ureg_src
322 ureg_DECL_system_value(struct ureg_program *ureg,
323 unsigned index,
324 unsigned semantic_name,
325 unsigned semantic_index)
326 {
327 if (ureg->nr_system_values < UREG_MAX_SYSTEM_VALUE) {
328 ureg->system_value[ureg->nr_system_values].index = index;
329 ureg->system_value[ureg->nr_system_values].semantic_name = semantic_name;
330 ureg->system_value[ureg->nr_system_values].semantic_index = semantic_index;
331 ureg->nr_system_values++;
332 } else {
333 set_bad(ureg);
334 }
335
336 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE, index);
337 }
338
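/* Usage sketch (illustrative, not part of the original file): declaring the
 * vertex-id system value at register index 0 of TGSI_FILE_SYSTEM_VALUE.
 *
 *    struct ureg_src vertex_id =
 *       ureg_DECL_system_value(ureg, 0, TGSI_SEMANTIC_VERTEXID, 0);
 *
 * Note that, unlike the input helpers above, the caller chooses the register
 * index explicitly.
 */
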
339
340 struct ureg_dst
341 ureg_DECL_output_masked(struct ureg_program *ureg,
342 unsigned name,
343 unsigned index,
344 unsigned usage_mask,
345 unsigned array_id,
346 unsigned array_size)
347 {
348 unsigned i;
349
350 assert(usage_mask != 0);
351
352 for (i = 0; i < ureg->nr_outputs; i++) {
353 if (ureg->output[i].semantic_name == name &&
354 ureg->output[i].semantic_index == index) {
355 assert(ureg->output[i].array_id == array_id);
356 ureg->output[i].usage_mask |= usage_mask;
357 goto out;
358 }
359 }
360
361 if (ureg->nr_outputs < UREG_MAX_OUTPUT) {
362 ureg->output[i].semantic_name = name;
363 ureg->output[i].semantic_index = index;
364 ureg->output[i].usage_mask = usage_mask;
365 ureg->output[i].first = ureg->nr_output_regs;
366 ureg->output[i].last = ureg->nr_output_regs + array_size - 1;
367 ureg->output[i].array_id = array_id;
368 ureg->nr_output_regs += array_size;
369 ureg->nr_outputs++;
370 }
371 else {
372 set_bad( ureg );
373 }
374
375 out:
376 return ureg_dst_array_register(TGSI_FILE_OUTPUT, ureg->output[i].first,
377 array_id);
378 }
379
380
381 struct ureg_dst
382 ureg_DECL_output(struct ureg_program *ureg,
383 unsigned name,
384 unsigned index)
385 {
386 return ureg_DECL_output_masked(ureg, name, index, TGSI_WRITEMASK_XYZW,
387 0, 1);
388 }
389
390 struct ureg_dst
391 ureg_DECL_output_array(struct ureg_program *ureg,
392 unsigned semantic_name,
393 unsigned semantic_index,
394 unsigned array_id,
395 unsigned array_size)
396 {
397 return ureg_DECL_output_masked(ureg, semantic_name, semantic_index,
398 TGSI_WRITEMASK_XYZW,
399 array_id, array_size);
400 }
401
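/* Usage sketch (illustrative, not part of the original file): declaring a
 * full color output and a two-component output that only writes .xy.
 *
 *    struct ureg_dst color = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *    struct ureg_dst uv    = ureg_DECL_output_masked(ureg,
 *                                                    TGSI_SEMANTIC_GENERIC, 0,
 *                                                    TGSI_WRITEMASK_XY,
 *                                                    0, 1);
 *
 * Re-declaring the same semantic simply ORs in the new usage mask and
 * returns the previously assigned register.
 */
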
402
403 /* Declare a two-dimensional constant range.  Keep track of which
404  * constants have been referred to so that we can emit decls later.
405  *
406  * Constant operands declared with this function must be addressed
407  * with a two-dimensional index.
408  *
409  * There is nothing in this code to bind these constants to any tracked
410  * value or manage any constant_buffer contents -- that's the
411  * responsibility of the calling code.
412  */
413 void
414 ureg_DECL_constant2D(struct ureg_program *ureg,
415 unsigned first,
416 unsigned last,
417 unsigned index2D)
418 {
419 struct const_decl *decl = &ureg->const_decls2D[index2D];
420
421 assert(index2D < PIPE_MAX_CONSTANT_BUFFERS);
422
423 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
424 uint i = decl->nr_constant_ranges++;
425
426 decl->constant_range[i].first = first;
427 decl->constant_range[i].last = last;
428 }
429 }
430
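/* Usage sketch (illustrative, not part of the original file): the 2D entry
 * point above declares an explicit range in a given constant buffer, while
 * the 1D helper below grows or merges ranges as individual indices are
 * referenced.
 *
 *    ureg_DECL_constant2D(ureg, 0, 3, 1);              range 0..3 in buffer 1
 *
 *    struct ureg_src c0 = ureg_DECL_constant(ureg, 0);
 *    struct ureg_src c1 = ureg_DECL_constant(ureg, 1);     extends the range
 *
 * The bookkeeping here only drives the CONST declarations emitted by
 * emit_decls(); uploading the actual constant buffer data is up to the
 * caller.
 */
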
431
432 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
433 *
434 * Constant operands declared with this function must be addressed
435 * with a one-dimensional index.
436 */
437 struct ureg_src
438 ureg_DECL_constant(struct ureg_program *ureg,
439 unsigned index)
440 {
441 struct const_decl *decl = &ureg->const_decls;
442 unsigned minconst = index, maxconst = index;
443 unsigned i;
444
445 /* Inside existing range?
446 */
447 for (i = 0; i < decl->nr_constant_ranges; i++) {
448 if (decl->constant_range[i].first <= index &&
449 decl->constant_range[i].last >= index) {
450 goto out;
451 }
452 }
453
454 /* Extend existing range?
455 */
456 for (i = 0; i < decl->nr_constant_ranges; i++) {
457 if (decl->constant_range[i].last == index - 1) {
458 decl->constant_range[i].last = index;
459 goto out;
460 }
461
462 if (decl->constant_range[i].first == index + 1) {
463 decl->constant_range[i].first = index;
464 goto out;
465 }
466
467 minconst = MIN2(minconst, decl->constant_range[i].first);
468 maxconst = MAX2(maxconst, decl->constant_range[i].last);
469 }
470
471 /* Create new range?
472 */
473 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
474 i = decl->nr_constant_ranges++;
475 decl->constant_range[i].first = index;
476 decl->constant_range[i].last = index;
477 goto out;
478 }
479
480 /* Collapse all ranges down to one:
481 */
482 i = 0;
483 decl->constant_range[0].first = minconst;
484 decl->constant_range[0].last = maxconst;
485 decl->nr_constant_ranges = 1;
486
487 out:
488 assert(i < decl->nr_constant_ranges);
489 assert(decl->constant_range[i].first <= index);
490 assert(decl->constant_range[i].last >= index);
491 return ureg_src_register(TGSI_FILE_CONSTANT, index);
492 }
493
494 static struct ureg_dst alloc_temporary( struct ureg_program *ureg,
495 boolean local )
496 {
497 unsigned i;
498
499 /* Look for a released temporary.
500 */
501 for (i = util_bitmask_get_first_index(ureg->free_temps);
502 i != UTIL_BITMASK_INVALID_INDEX;
503 i = util_bitmask_get_next_index(ureg->free_temps, i + 1)) {
504 if (util_bitmask_get(ureg->local_temps, i) == local)
505 break;
506 }
507
508 /* Or allocate a new one.
509 */
510 if (i == UTIL_BITMASK_INVALID_INDEX) {
511 i = ureg->nr_temps++;
512
513 if (local)
514 util_bitmask_set(ureg->local_temps, i);
515
516 /* Start a new declaration when the local flag changes */
517 if (!i || util_bitmask_get(ureg->local_temps, i - 1) != local)
518 util_bitmask_set(ureg->decl_temps, i);
519 }
520
521 util_bitmask_clear(ureg->free_temps, i);
522
523 return ureg_dst_register( TGSI_FILE_TEMPORARY, i );
524 }
525
526 struct ureg_dst ureg_DECL_temporary( struct ureg_program *ureg )
527 {
528 return alloc_temporary(ureg, FALSE);
529 }
530
531 struct ureg_dst ureg_DECL_local_temporary( struct ureg_program *ureg )
532 {
533 return alloc_temporary(ureg, TRUE);
534 }
535
536 struct ureg_dst ureg_DECL_array_temporary( struct ureg_program *ureg,
537 unsigned size,
538 boolean local )
539 {
540 unsigned i = ureg->nr_temps;
541 struct ureg_dst dst = ureg_dst_register( TGSI_FILE_TEMPORARY, i );
542
543 if (local)
544 util_bitmask_set(ureg->local_temps, i);
545
546 /* Always start a new declaration at the start */
547 util_bitmask_set(ureg->decl_temps, i);
548
549 ureg->nr_temps += size;
550
551 /* and also at the end of the array */
552 util_bitmask_set(ureg->decl_temps, ureg->nr_temps);
553
554 if (ureg->nr_array_temps < UREG_MAX_ARRAY_TEMPS) {
555 ureg->array_temps[ureg->nr_array_temps++] = i;
556 dst.ArrayID = ureg->nr_array_temps;
557 }
558
559 return dst;
560 }
561
562 void ureg_release_temporary( struct ureg_program *ureg,
563 struct ureg_dst tmp )
564 {
565 if (tmp.File == TGSI_FILE_TEMPORARY)
566 util_bitmask_set(ureg->free_temps, tmp.Index);
567 }
568
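/* Usage sketch (illustrative, not part of the original file): temporary
 * registers are recycled, so releasing one lets a later declaration reuse
 * the same TEMP index; array temporaries are carved out of the same index
 * space but always get their own declaration and a non-zero ArrayID.
 *
 *    struct ureg_dst t0  = ureg_DECL_temporary(ureg);
 *    struct ureg_dst arr = ureg_DECL_array_temporary(ureg, 4, FALSE);
 *       ... use t0 ...
 *    ureg_release_temporary(ureg, t0);
 *    struct ureg_dst t1  = ureg_DECL_temporary(ureg);    may reuse t0's index
 */
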
569
570 /* Allocate a new address register.
571 */
572 struct ureg_dst ureg_DECL_address( struct ureg_program *ureg )
573 {
574 if (ureg->nr_addrs < UREG_MAX_ADDR)
575 return ureg_dst_register( TGSI_FILE_ADDRESS, ureg->nr_addrs++ );
576
577 assert( 0 );
578 return ureg_dst_register( TGSI_FILE_ADDRESS, 0 );
579 }
580
581 /* Allocate a new predicate register.
582 */
583 struct ureg_dst
584 ureg_DECL_predicate(struct ureg_program *ureg)
585 {
586 if (ureg->nr_preds < UREG_MAX_PRED) {
587 return ureg_dst_register(TGSI_FILE_PREDICATE, ureg->nr_preds++);
588 }
589
590 assert(0);
591 return ureg_dst_register(TGSI_FILE_PREDICATE, 0);
592 }
593
594 /* Allocate a new sampler.
595 */
596 struct ureg_src ureg_DECL_sampler( struct ureg_program *ureg,
597 unsigned nr )
598 {
599 unsigned i;
600
601 for (i = 0; i < ureg->nr_samplers; i++)
602 if (ureg->sampler[i].Index == nr)
603 return ureg->sampler[i];
604
605 if (i < PIPE_MAX_SAMPLERS) {
606 ureg->sampler[i] = ureg_src_register( TGSI_FILE_SAMPLER, nr );
607 ureg->nr_samplers++;
608 return ureg->sampler[i];
609 }
610
611 assert( 0 );
612 return ureg->sampler[0];
613 }
614
615 /*
616 * Allocate a new shader sampler view.
617 */
618 struct ureg_src
619 ureg_DECL_sampler_view(struct ureg_program *ureg,
620 unsigned index,
621 unsigned target,
622 unsigned return_type_x,
623 unsigned return_type_y,
624 unsigned return_type_z,
625 unsigned return_type_w)
626 {
627 struct ureg_src reg = ureg_src_register(TGSI_FILE_SAMPLER_VIEW, index);
628 uint i;
629
630 for (i = 0; i < ureg->nr_sampler_views; i++) {
631 if (ureg->sampler_view[i].index == index) {
632 return reg;
633 }
634 }
635
636 if (i < PIPE_MAX_SHADER_SAMPLER_VIEWS) {
637 ureg->sampler_view[i].index = index;
638 ureg->sampler_view[i].target = target;
639 ureg->sampler_view[i].return_type_x = return_type_x;
640 ureg->sampler_view[i].return_type_y = return_type_y;
641 ureg->sampler_view[i].return_type_z = return_type_z;
642 ureg->sampler_view[i].return_type_w = return_type_w;
643 ureg->nr_sampler_views++;
644 return reg;
645 }
646
647 assert(0);
648 return reg;
649 }
650
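/* Usage sketch (illustrative, not part of the original file): declaring a
 * sampler together with a matching sampler view for a 2D float texture.
 *
 *    struct ureg_src sampler = ureg_DECL_sampler(ureg, 0);
 *    ureg_DECL_sampler_view(ureg, 0, TGSI_TEXTURE_2D,
 *                           TGSI_RETURN_TYPE_FLOAT, TGSI_RETURN_TYPE_FLOAT,
 *                           TGSI_RETURN_TYPE_FLOAT, TGSI_RETURN_TYPE_FLOAT);
 *
 * Both helpers are idempotent for an already-declared index.
 */
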
651 static int
652 match_or_expand_immediate64( const unsigned *v,
653 int type,
654 unsigned nr,
655 unsigned *v2,
656 unsigned *pnr2,
657 unsigned *swizzle )
658 {
659 unsigned nr2 = *pnr2;
660 unsigned i, j;
661 *swizzle = 0;
662
663 for (i = 0; i < nr; i += 2) {
664 boolean found = FALSE;
665
666 for (j = 0; j < nr2 && !found; j += 2) {
667 if (v[i] == v2[j] && v[i + 1] == v2[j + 1]) {
668 *swizzle |= (j << (i * 2)) | ((j + 1) << ((i + 1) * 2));
669 found = TRUE;
670 }
671 }
672 if (!found) {
673 if (nr2 >= 4) {
674 return FALSE;
675 }
676
677 v2[nr2] = v[i];
678 v2[nr2 + 1] = v[i + 1];
679
680 *swizzle |= (nr2 << (i * 2)) | ((nr2 + 1) << ((i + 1) * 2));
681 nr2 += 2;
682 }
683 }
684
685 /* Only commit the expanded size when every element has been matched
686  * or added. */
687 *pnr2 = nr2;
688 return TRUE;
689 }
690
691 static int
692 match_or_expand_immediate( const unsigned *v,
693 int type,
694 unsigned nr,
695 unsigned *v2,
696 unsigned *pnr2,
697 unsigned *swizzle )
698 {
699 unsigned nr2 = *pnr2;
700 unsigned i, j;
701
702 if (type == TGSI_IMM_FLOAT64)
703 return match_or_expand_immediate64(v, type, nr, v2, pnr2, swizzle);
704
705 *swizzle = 0;
706
707 for (i = 0; i < nr; i++) {
708 boolean found = FALSE;
709
710 for (j = 0; j < nr2 && !found; j++) {
711 if (v[i] == v2[j]) {
712 *swizzle |= j << (i * 2);
713 found = TRUE;
714 }
715 }
716
717 if (!found) {
718 if (nr2 >= 4) {
719 return FALSE;
720 }
721
722 v2[nr2] = v[i];
723 *swizzle |= nr2 << (i * 2);
724 nr2++;
725 }
726 }
727
728 /* Only commit the expanded size when every element has been matched
729  * or added. */
730 *pnr2 = nr2;
731 return TRUE;
732 }
733
734
735 static struct ureg_src
736 decl_immediate( struct ureg_program *ureg,
737 const unsigned *v,
738 unsigned nr,
739 unsigned type )
740 {
741 unsigned i, j;
742 unsigned swizzle = 0;
743
744 /* Could do a first pass where we examine all existing immediates
745 * without expanding.
746 */
747
748 for (i = 0; i < ureg->nr_immediates; i++) {
749 if (ureg->immediate[i].type != type) {
750 continue;
751 }
752 if (match_or_expand_immediate(v,
753 type,
754 nr,
755 ureg->immediate[i].value.u,
756 &ureg->immediate[i].nr,
757 &swizzle)) {
758 goto out;
759 }
760 }
761
762 if (ureg->nr_immediates < UREG_MAX_IMMEDIATE) {
763 i = ureg->nr_immediates++;
764 ureg->immediate[i].type = type;
765 if (match_or_expand_immediate(v,
766 type,
767 nr,
768 ureg->immediate[i].value.u,
769 &ureg->immediate[i].nr,
770 &swizzle)) {
771 goto out;
772 }
773 }
774
775 set_bad(ureg);
776
777 out:
778 /* Make sure that all referenced elements are from this immediate.
779 * Has the effect of making size-one immediates into scalars.
780 */
781 if (type == TGSI_IMM_FLOAT64) {
782 for (j = nr; j < 4; j += 2) {
783 swizzle |= (swizzle & 0xf) << (j * 2);
784 }
785 } else {
786 for (j = nr; j < 4; j++) {
787 swizzle |= (swizzle & 0x3) << (j * 2);
788 }
789 }
790 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE, i),
791 (swizzle >> 0) & 0x3,
792 (swizzle >> 2) & 0x3,
793 (swizzle >> 4) & 0x3,
794 (swizzle >> 6) & 0x3);
795 }
796
797
798 struct ureg_src
799 ureg_DECL_immediate( struct ureg_program *ureg,
800 const float *v,
801 unsigned nr )
802 {
803 union {
804 float f[4];
805 unsigned u[4];
806 } fu;
807 unsigned int i;
808
809 for (i = 0; i < nr; i++) {
810 fu.f[i] = v[i];
811 }
812
813 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT32);
814 }
815
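/* Usage sketch (illustrative, not part of the original file): immediates of
 * the same type share storage where possible, and the returned source is
 * pre-swizzled so that it reads back the requested components.
 *
 *    static const float half[1] = { 0.5f };
 *    static const float rgba[4] = { 1.0f, 0.5f, 0.25f, 1.0f };
 *
 *    struct ureg_src imm_half = ureg_DECL_immediate(ureg, half, 1);
 *    struct ureg_src imm_rgba = ureg_DECL_immediate(ureg, rgba, 4);
 *
 * Here imm_half ends up as a scalar swizzle of an immediate slot that may
 * also back part of imm_rgba, depending on what has already been declared.
 */
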
816 struct ureg_src
817 ureg_DECL_immediate_f64( struct ureg_program *ureg,
818 const double *v,
819 unsigned nr )
820 {
821 union {
822 unsigned u[4];
823 double d[2];
824 } fu;
825 unsigned int i;
826
827 assert((nr / 2) < 3);
828 for (i = 0; i < nr / 2; i++) {
829 fu.d[i] = v[i];
830 }
831
832 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT64);
833 }
834
835 struct ureg_src
836 ureg_DECL_immediate_uint( struct ureg_program *ureg,
837 const unsigned *v,
838 unsigned nr )
839 {
840 return decl_immediate(ureg, v, nr, TGSI_IMM_UINT32);
841 }
842
843
844 struct ureg_src
845 ureg_DECL_immediate_block_uint( struct ureg_program *ureg,
846 const unsigned *v,
847 unsigned nr )
848 {
849 uint index;
850 uint i;
851
852 if (ureg->nr_immediates + (nr + 3) / 4 > UREG_MAX_IMMEDIATE) {
853 set_bad(ureg);
854 return ureg_src_register(TGSI_FILE_IMMEDIATE, 0);
855 }
856
857 index = ureg->nr_immediates;
858 ureg->nr_immediates += (nr + 3) / 4;
859
860 for (i = index; i < ureg->nr_immediates; i++) {
861 ureg->immediate[i].type = TGSI_IMM_UINT32;
862 ureg->immediate[i].nr = nr > 4 ? 4 : nr;
863 memcpy(ureg->immediate[i].value.u,
864 &v[(i - index) * 4],
865 ureg->immediate[i].nr * sizeof(uint));
866 nr -= 4;
867 }
868
869 return ureg_src_register(TGSI_FILE_IMMEDIATE, index);
870 }
871
872
873 struct ureg_src
874 ureg_DECL_immediate_int( struct ureg_program *ureg,
875 const int *v,
876 unsigned nr )
877 {
878 return decl_immediate(ureg, (const unsigned *)v, nr, TGSI_IMM_INT32);
879 }
880
881
882 void
883 ureg_emit_src( struct ureg_program *ureg,
884 struct ureg_src src )
885 {
886 unsigned size = 1 + (src.Indirect ? 1 : 0) +
887 (src.Dimension ? (src.DimIndirect ? 2 : 1) : 0);
888
889 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
890 unsigned n = 0;
891
892 assert(src.File != TGSI_FILE_NULL);
893 assert(src.File < TGSI_FILE_COUNT);
894
895 out[n].value = 0;
896 out[n].src.File = src.File;
897 out[n].src.SwizzleX = src.SwizzleX;
898 out[n].src.SwizzleY = src.SwizzleY;
899 out[n].src.SwizzleZ = src.SwizzleZ;
900 out[n].src.SwizzleW = src.SwizzleW;
901 out[n].src.Index = src.Index;
902 out[n].src.Negate = src.Negate;
903 out[n].src.Absolute = src.Absolute;
904 n++;
905
906 if (src.Indirect) {
907 out[0].src.Indirect = 1;
908 out[n].value = 0;
909 out[n].ind.File = src.IndirectFile;
910 out[n].ind.Swizzle = src.IndirectSwizzle;
911 out[n].ind.Index = src.IndirectIndex;
912 if (!ureg->supports_any_inout_decl_range &&
913 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
914 out[n].ind.ArrayID = 0;
915 else
916 out[n].ind.ArrayID = src.ArrayID;
917 n++;
918 }
919
920 if (src.Dimension) {
921 out[0].src.Dimension = 1;
922 out[n].dim.Dimension = 0;
923 out[n].dim.Padding = 0;
924 if (src.DimIndirect) {
925 out[n].dim.Indirect = 1;
926 out[n].dim.Index = src.DimensionIndex;
927 n++;
928 out[n].value = 0;
929 out[n].ind.File = src.DimIndFile;
930 out[n].ind.Swizzle = src.DimIndSwizzle;
931 out[n].ind.Index = src.DimIndIndex;
932 if (!ureg->supports_any_inout_decl_range &&
933 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
934 out[n].ind.ArrayID = 0;
935 else
936 out[n].ind.ArrayID = src.ArrayID;
937 } else {
938 out[n].dim.Indirect = 0;
939 out[n].dim.Index = src.DimensionIndex;
940 }
941 n++;
942 }
943
944 assert(n == size);
945 }
946
947
948 void
949 ureg_emit_dst( struct ureg_program *ureg,
950 struct ureg_dst dst )
951 {
952 unsigned size = 1 + (dst.Indirect ? 1 : 0) +
953 (dst.Dimension ? (dst.DimIndirect ? 2 : 1) : 0);
954
955 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
956 unsigned n = 0;
957
958 assert(dst.File != TGSI_FILE_NULL);
959 assert(dst.File != TGSI_FILE_CONSTANT);
960 assert(dst.File != TGSI_FILE_INPUT);
961 assert(dst.File != TGSI_FILE_SAMPLER);
962 assert(dst.File != TGSI_FILE_SAMPLER_VIEW);
963 assert(dst.File != TGSI_FILE_IMMEDIATE);
964 assert(dst.File < TGSI_FILE_COUNT);
965
966 out[n].value = 0;
967 out[n].dst.File = dst.File;
968 out[n].dst.WriteMask = dst.WriteMask;
969 out[n].dst.Indirect = dst.Indirect;
970 out[n].dst.Index = dst.Index;
971 n++;
972
973 if (dst.Indirect) {
974 out[n].value = 0;
975 out[n].ind.File = dst.IndirectFile;
976 out[n].ind.Swizzle = dst.IndirectSwizzle;
977 out[n].ind.Index = dst.IndirectIndex;
978 if (!ureg->supports_any_inout_decl_range &&
979 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
980 out[n].ind.ArrayID = 0;
981 else
982 out[n].ind.ArrayID = dst.ArrayID;
983 n++;
984 }
985
986 if (dst.Dimension) {
987 out[0].dst.Dimension = 1;
988 out[n].dim.Dimension = 0;
989 out[n].dim.Padding = 0;
990 if (dst.DimIndirect) {
991 out[n].dim.Indirect = 1;
992 out[n].dim.Index = dst.DimensionIndex;
993 n++;
994 out[n].value = 0;
995 out[n].ind.File = dst.DimIndFile;
996 out[n].ind.Swizzle = dst.DimIndSwizzle;
997 out[n].ind.Index = dst.DimIndIndex;
998 if (!ureg->supports_any_inout_decl_range &&
999 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1000 out[n].ind.ArrayID = 0;
1001 else
1002 out[n].ind.ArrayID = dst.ArrayID;
1003 } else {
1004 out[n].dim.Indirect = 0;
1005 out[n].dim.Index = dst.DimensionIndex;
1006 }
1007 n++;
1008 }
1009
1010 assert(n == size);
1011 }
1012
1013
1014 static void validate( unsigned opcode,
1015 unsigned nr_dst,
1016 unsigned nr_src )
1017 {
1018 #ifdef DEBUG
1019 const struct tgsi_opcode_info *info = tgsi_get_opcode_info( opcode );
1020 assert(info);
1021 if (info) {
1022 assert(nr_dst == info->num_dst);
1023 assert(nr_src == info->num_src);
1024 }
1025 #endif
1026 }
1027
1028 struct ureg_emit_insn_result
1029 ureg_emit_insn(struct ureg_program *ureg,
1030 unsigned opcode,
1031 boolean saturate,
1032 boolean predicate,
1033 boolean pred_negate,
1034 unsigned pred_swizzle_x,
1035 unsigned pred_swizzle_y,
1036 unsigned pred_swizzle_z,
1037 unsigned pred_swizzle_w,
1038 unsigned num_dst,
1039 unsigned num_src )
1040 {
1041 union tgsi_any_token *out;
1042 uint count = predicate ? 2 : 1;
1043 struct ureg_emit_insn_result result;
1044
1045 validate( opcode, num_dst, num_src );
1046
1047 out = get_tokens( ureg, DOMAIN_INSN, count );
1048 out[0].insn = tgsi_default_instruction();
1049 out[0].insn.Opcode = opcode;
1050 out[0].insn.Saturate = saturate;
1051 out[0].insn.NumDstRegs = num_dst;
1052 out[0].insn.NumSrcRegs = num_src;
1053
1054 result.insn_token = ureg->domain[DOMAIN_INSN].count - count;
1055 result.extended_token = result.insn_token;
1056
1057 if (predicate) {
1058 out[0].insn.Predicate = 1;
1059 out[1].insn_predicate = tgsi_default_instruction_predicate();
1060 out[1].insn_predicate.Negate = pred_negate;
1061 out[1].insn_predicate.SwizzleX = pred_swizzle_x;
1062 out[1].insn_predicate.SwizzleY = pred_swizzle_y;
1063 out[1].insn_predicate.SwizzleZ = pred_swizzle_z;
1064 out[1].insn_predicate.SwizzleW = pred_swizzle_w;
1065 }
1066
1067 ureg->nr_instructions++;
1068
1069 return result;
1070 }
1071
1072
1073 /**
1074  * Emit a label token.
1075  * \param label_token returns the token number of the emitted label;
1076  * pass this value to ureg_fixup_label() later to patch in the target
1077  * instruction number.
1078  */
1079 void
1080 ureg_emit_label(struct ureg_program *ureg,
1081 unsigned extended_token,
1082 unsigned *label_token )
1083 {
1084 union tgsi_any_token *out, *insn;
1085
1086 if (!label_token)
1087 return;
1088
1089 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1090 out[0].value = 0;
1091
1092 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1093 insn->insn.Label = 1;
1094
1095 *label_token = ureg->domain[DOMAIN_INSN].count - 1;
1096 }
1097
1098 /* Return the number of the next instruction to be emitted, suitable
1099  * for use as a label target.
1100  */
1101 unsigned
1102 ureg_get_instruction_number( struct ureg_program *ureg )
1103 {
1104 return ureg->nr_instructions;
1105 }
1106
1107 /* Patch a given label (expressed as a token number) to point to a
1108 * given instruction (expressed as an instruction number).
1109 */
1110 void
1111 ureg_fixup_label(struct ureg_program *ureg,
1112 unsigned label_token,
1113 unsigned instruction_number )
1114 {
1115 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, label_token );
1116
1117 out->insn_label.Label = instruction_number;
1118 }
1119
1120
1121 void
1122 ureg_emit_texture(struct ureg_program *ureg,
1123 unsigned extended_token,
1124 unsigned target, unsigned num_offsets)
1125 {
1126 union tgsi_any_token *out, *insn;
1127
1128 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1129 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1130
1131 insn->insn.Texture = 1;
1132
1133 out[0].value = 0;
1134 out[0].insn_texture.Texture = target;
1135 out[0].insn_texture.NumOffsets = num_offsets;
1136 }
1137
1138 void
1139 ureg_emit_texture_offset(struct ureg_program *ureg,
1140 const struct tgsi_texture_offset *offset)
1141 {
1142 union tgsi_any_token *out;
1143
1144 out = get_tokens( ureg, DOMAIN_INSN, 1);
1145
1146 out[0].value = 0;
1147 out[0].insn_texture_offset = *offset;
1148
1149 }
1150
1151
1152 void
1153 ureg_fixup_insn_size(struct ureg_program *ureg,
1154 unsigned insn )
1155 {
1156 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, insn );
1157
1158 assert(out->insn.Type == TGSI_TOKEN_TYPE_INSTRUCTION);
1159 out->insn.NrTokens = ureg->domain[DOMAIN_INSN].count - insn - 1;
1160 }
1161
1162
1163 void
1164 ureg_insn(struct ureg_program *ureg,
1165 unsigned opcode,
1166 const struct ureg_dst *dst,
1167 unsigned nr_dst,
1168 const struct ureg_src *src,
1169 unsigned nr_src )
1170 {
1171 struct ureg_emit_insn_result insn;
1172 unsigned i;
1173 boolean saturate;
1174 boolean predicate;
1175 boolean negate = FALSE;
1176 unsigned swizzle[4] = { 0 };
1177
1178 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1179 return;
1180 }
1181
1182 saturate = nr_dst ? dst[0].Saturate : FALSE;
1183 predicate = nr_dst ? dst[0].Predicate : FALSE;
1184 if (predicate) {
1185 negate = dst[0].PredNegate;
1186 swizzle[0] = dst[0].PredSwizzleX;
1187 swizzle[1] = dst[0].PredSwizzleY;
1188 swizzle[2] = dst[0].PredSwizzleZ;
1189 swizzle[3] = dst[0].PredSwizzleW;
1190 }
1191
1192 insn = ureg_emit_insn(ureg,
1193 opcode,
1194 saturate,
1195 predicate,
1196 negate,
1197 swizzle[0],
1198 swizzle[1],
1199 swizzle[2],
1200 swizzle[3],
1201 nr_dst,
1202 nr_src);
1203
1204 for (i = 0; i < nr_dst; i++)
1205 ureg_emit_dst( ureg, dst[i] );
1206
1207 for (i = 0; i < nr_src; i++)
1208 ureg_emit_src( ureg, src[i] );
1209
1210 ureg_fixup_insn_size( ureg, insn.insn_token );
1211 }
1212
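/* Usage sketch (illustrative, not part of the original file): emitting a
 * plain MOV through the generic ureg_insn() path.  tgsi_ureg.h also provides
 * generated per-opcode helpers (ureg_MOV() and friends) that reduce to
 * exactly this call.
 *
 *    struct ureg_dst dst = ureg_DECL_temporary(ureg);
 *    struct ureg_src src = ureg_DECL_constant(ureg, 0);
 *
 *    ureg_insn(ureg, TGSI_OPCODE_MOV, &dst, 1, &src, 1);
 */
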
1213 void
1214 ureg_tex_insn(struct ureg_program *ureg,
1215 unsigned opcode,
1216 const struct ureg_dst *dst,
1217 unsigned nr_dst,
1218 unsigned target,
1219 const struct tgsi_texture_offset *texoffsets,
1220 unsigned nr_offset,
1221 const struct ureg_src *src,
1222 unsigned nr_src )
1223 {
1224 struct ureg_emit_insn_result insn;
1225 unsigned i;
1226 boolean saturate;
1227 boolean predicate;
1228 boolean negate = FALSE;
1229 unsigned swizzle[4] = { 0 };
1230
1231 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1232 return;
1233 }
1234
1235 saturate = nr_dst ? dst[0].Saturate : FALSE;
1236 predicate = nr_dst ? dst[0].Predicate : FALSE;
1237 if (predicate) {
1238 negate = dst[0].PredNegate;
1239 swizzle[0] = dst[0].PredSwizzleX;
1240 swizzle[1] = dst[0].PredSwizzleY;
1241 swizzle[2] = dst[0].PredSwizzleZ;
1242 swizzle[3] = dst[0].PredSwizzleW;
1243 }
1244
1245 insn = ureg_emit_insn(ureg,
1246 opcode,
1247 saturate,
1248 predicate,
1249 negate,
1250 swizzle[0],
1251 swizzle[1],
1252 swizzle[2],
1253 swizzle[3],
1254 nr_dst,
1255 nr_src);
1256
1257 ureg_emit_texture( ureg, insn.extended_token, target, nr_offset );
1258
1259 for (i = 0; i < nr_offset; i++)
1260 ureg_emit_texture_offset( ureg, &texoffsets[i]);
1261
1262 for (i = 0; i < nr_dst; i++)
1263 ureg_emit_dst( ureg, dst[i] );
1264
1265 for (i = 0; i < nr_src; i++)
1266 ureg_emit_src( ureg, src[i] );
1267
1268 ureg_fixup_insn_size( ureg, insn.insn_token );
1269 }
1270
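/* Usage sketch (illustrative, not part of the original file): a basic 2D
 * texture sample.  TEX takes the coordinate and the sampler as its two
 * sources; no texel offsets are supplied here.
 *
 *    struct ureg_dst dst     = ureg_DECL_temporary(ureg);
 *    struct ureg_src coord   = ureg_DECL_fs_input_cyl_centroid(ureg,
 *                                 TGSI_SEMANTIC_GENERIC, 0,
 *                                 TGSI_INTERPOLATE_PERSPECTIVE, 0, 0, 0, 1);
 *    struct ureg_src sampler = ureg_DECL_sampler(ureg, 0);
 *    struct ureg_src srcs[2];
 *
 *    srcs[0] = coord;
 *    srcs[1] = sampler;
 *    ureg_tex_insn(ureg, TGSI_OPCODE_TEX, &dst, 1, TGSI_TEXTURE_2D,
 *                  NULL, 0, srcs, 2);
 */
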
1271
1272 void
1273 ureg_label_insn(struct ureg_program *ureg,
1274 unsigned opcode,
1275 const struct ureg_src *src,
1276 unsigned nr_src,
1277 unsigned *label_token )
1278 {
1279 struct ureg_emit_insn_result insn;
1280 unsigned i;
1281
1282 insn = ureg_emit_insn(ureg,
1283 opcode,
1284 FALSE,
1285 FALSE,
1286 FALSE,
1287 TGSI_SWIZZLE_X,
1288 TGSI_SWIZZLE_Y,
1289 TGSI_SWIZZLE_Z,
1290 TGSI_SWIZZLE_W,
1291 0,
1292 nr_src);
1293
1294 ureg_emit_label( ureg, insn.extended_token, label_token );
1295
1296 for (i = 0; i < nr_src; i++)
1297 ureg_emit_src( ureg, src[i] );
1298
1299 ureg_fixup_insn_size( ureg, insn.insn_token );
1300 }
1301
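/* Usage sketch (illustrative, not part of the original file): the label
 * helpers are used for branching opcodes.  The exact instruction a label
 * must point at is defined by the TGSI control-flow rules (and several
 * drivers ignore labels on structured branches entirely); the point here is
 * only the emit-then-patch mechanism.
 *
 *    unsigned if_label;
 *    struct ureg_src cond = ureg_DECL_constant(ureg, 0);
 *
 *    ureg_label_insn(ureg, TGSI_OPCODE_IF, &cond, 1, &if_label);
 *       ... emit the conditionally executed instructions, then ENDIF ...
 *    ureg_fixup_label(ureg, if_label,
 *                     ureg_get_instruction_number(ureg));
 */
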
1302
1303 static void
1304 emit_decl_semantic(struct ureg_program *ureg,
1305 unsigned file,
1306 unsigned first,
1307 unsigned last,
1308 unsigned semantic_name,
1309 unsigned semantic_index,
1310 unsigned usage_mask,
1311 unsigned array_id)
1312 {
1313 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1314
1315 out[0].value = 0;
1316 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1317 out[0].decl.NrTokens = 3;
1318 out[0].decl.File = file;
1319 out[0].decl.UsageMask = usage_mask;
1320 out[0].decl.Semantic = 1;
1321 out[0].decl.Array = array_id != 0;
1322
1323 out[1].value = 0;
1324 out[1].decl_range.First = first;
1325 out[1].decl_range.Last = last;
1326
1327 out[2].value = 0;
1328 out[2].decl_semantic.Name = semantic_name;
1329 out[2].decl_semantic.Index = semantic_index;
1330
1331 if (array_id) {
1332 out[3].value = 0;
1333 out[3].array.ArrayID = array_id;
1334 }
1335 }
1336
1337
1338 static void
1339 emit_decl_fs(struct ureg_program *ureg,
1340 unsigned file,
1341 unsigned first,
1342 unsigned last,
1343 unsigned semantic_name,
1344 unsigned semantic_index,
1345 unsigned interpolate,
1346 unsigned cylindrical_wrap,
1347 unsigned interpolate_location,
1348 unsigned array_id)
1349 {
1350 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL,
1351 array_id ? 5 : 4);
1352
1353 out[0].value = 0;
1354 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1355 out[0].decl.NrTokens = 4;
1356 out[0].decl.File = file;
1357 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW; /* FIXME! */
1358 out[0].decl.Interpolate = 1;
1359 out[0].decl.Semantic = 1;
1360 out[0].decl.Array = array_id != 0;
1361
1362 out[1].value = 0;
1363 out[1].decl_range.First = first;
1364 out[1].decl_range.Last = last;
1365
1366 out[2].value = 0;
1367 out[2].decl_interp.Interpolate = interpolate;
1368 out[2].decl_interp.CylindricalWrap = cylindrical_wrap;
1369 out[2].decl_interp.Location = interpolate_location;
1370
1371 out[3].value = 0;
1372 out[3].decl_semantic.Name = semantic_name;
1373 out[3].decl_semantic.Index = semantic_index;
1374
1375 if (array_id) {
1376 out[4].value = 0;
1377 out[4].array.ArrayID = array_id;
1378 }
1379 }
1380
1381 static void
1382 emit_decl_temps( struct ureg_program *ureg,
1383 unsigned first, unsigned last,
1384 boolean local,
1385 unsigned arrayid )
1386 {
1387 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL,
1388 arrayid ? 3 : 2 );
1389
1390 out[0].value = 0;
1391 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1392 out[0].decl.NrTokens = 2;
1393 out[0].decl.File = TGSI_FILE_TEMPORARY;
1394 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1395 out[0].decl.Local = local;
1396
1397 out[1].value = 0;
1398 out[1].decl_range.First = first;
1399 out[1].decl_range.Last = last;
1400
1401 if (arrayid) {
1402 out[0].decl.Array = 1;
1403 out[2].value = 0;
1404 out[2].array.ArrayID = arrayid;
1405 }
1406 }
1407
1408 static void emit_decl_range( struct ureg_program *ureg,
1409 unsigned file,
1410 unsigned first,
1411 unsigned count )
1412 {
1413 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
1414
1415 out[0].value = 0;
1416 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1417 out[0].decl.NrTokens = 2;
1418 out[0].decl.File = file;
1419 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1420 out[0].decl.Semantic = 0;
1421
1422 out[1].value = 0;
1423 out[1].decl_range.First = first;
1424 out[1].decl_range.Last = first + count - 1;
1425 }
1426
1427 static void
1428 emit_decl_range2D(struct ureg_program *ureg,
1429 unsigned file,
1430 unsigned first,
1431 unsigned last,
1432 unsigned index2D)
1433 {
1434 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1435
1436 out[0].value = 0;
1437 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1438 out[0].decl.NrTokens = 3;
1439 out[0].decl.File = file;
1440 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1441 out[0].decl.Dimension = 1;
1442
1443 out[1].value = 0;
1444 out[1].decl_range.First = first;
1445 out[1].decl_range.Last = last;
1446
1447 out[2].value = 0;
1448 out[2].decl_dim.Index2D = index2D;
1449 }
1450
1451 static void
1452 emit_decl_sampler_view(struct ureg_program *ureg,
1453 unsigned index,
1454 unsigned target,
1455 unsigned return_type_x,
1456 unsigned return_type_y,
1457 unsigned return_type_z,
1458 unsigned return_type_w )
1459 {
1460 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1461
1462 out[0].value = 0;
1463 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1464 out[0].decl.NrTokens = 3;
1465 out[0].decl.File = TGSI_FILE_SAMPLER_VIEW;
1466 out[0].decl.UsageMask = 0xf;
1467
1468 out[1].value = 0;
1469 out[1].decl_range.First = index;
1470 out[1].decl_range.Last = index;
1471
1472 out[2].value = 0;
1473 out[2].decl_sampler_view.Resource = target;
1474 out[2].decl_sampler_view.ReturnTypeX = return_type_x;
1475 out[2].decl_sampler_view.ReturnTypeY = return_type_y;
1476 out[2].decl_sampler_view.ReturnTypeZ = return_type_z;
1477 out[2].decl_sampler_view.ReturnTypeW = return_type_w;
1478 }
1479
1480 static void
1481 emit_immediate( struct ureg_program *ureg,
1482 const unsigned *v,
1483 unsigned type )
1484 {
1485 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 5 );
1486
1487 out[0].value = 0;
1488 out[0].imm.Type = TGSI_TOKEN_TYPE_IMMEDIATE;
1489 out[0].imm.NrTokens = 5;
1490 out[0].imm.DataType = type;
1491 out[0].imm.Padding = 0;
1492
1493 out[1].imm_data.Uint = v[0];
1494 out[2].imm_data.Uint = v[1];
1495 out[3].imm_data.Uint = v[2];
1496 out[4].imm_data.Uint = v[3];
1497 }
1498
1499 static void
1500 emit_property(struct ureg_program *ureg,
1501 unsigned name,
1502 unsigned data)
1503 {
1504 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1505
1506 out[0].value = 0;
1507 out[0].prop.Type = TGSI_TOKEN_TYPE_PROPERTY;
1508 out[0].prop.NrTokens = 2;
1509 out[0].prop.PropertyName = name;
1510
1511 out[1].prop_data.Data = data;
1512 }
1513
1514
1515 static void emit_decls( struct ureg_program *ureg )
1516 {
1517 unsigned i,j;
1518
1519 for (i = 0; i < Elements(ureg->properties); i++)
1520 if (ureg->properties[i] != ~0)
1521 emit_property(ureg, i, ureg->properties[i]);
1522
1523 if (ureg->processor == TGSI_PROCESSOR_VERTEX) {
1524 for (i = 0; i < PIPE_MAX_ATTRIBS; i++) {
1525 if (ureg->vs_inputs[i/32] & (1 << (i%32))) {
1526 emit_decl_range( ureg, TGSI_FILE_INPUT, i, 1 );
1527 }
1528 }
1529 } else if (ureg->processor == TGSI_PROCESSOR_FRAGMENT) {
1530 if (ureg->supports_any_inout_decl_range) {
1531 for (i = 0; i < ureg->nr_inputs; i++) {
1532 emit_decl_fs(ureg,
1533 TGSI_FILE_INPUT,
1534 ureg->input[i].first,
1535 ureg->input[i].last,
1536 ureg->input[i].semantic_name,
1537 ureg->input[i].semantic_index,
1538 ureg->input[i].interp,
1539 ureg->input[i].cylindrical_wrap,
1540 ureg->input[i].interp_location,
1541 ureg->input[i].array_id);
1542 }
1543 }
1544 else {
1545 for (i = 0; i < ureg->nr_inputs; i++) {
1546 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1547 emit_decl_fs(ureg,
1548 TGSI_FILE_INPUT,
1549 j, j,
1550 ureg->input[i].semantic_name,
1551 ureg->input[i].semantic_index +
1552 (j - ureg->input[i].first),
1553 ureg->input[i].interp,
1554 ureg->input[i].cylindrical_wrap,
1555 ureg->input[i].interp_location, 0);
1556 }
1557 }
1558 }
1559 } else {
1560 if (ureg->supports_any_inout_decl_range) {
1561 for (i = 0; i < ureg->nr_inputs; i++) {
1562 emit_decl_semantic(ureg,
1563 TGSI_FILE_INPUT,
1564 ureg->input[i].first,
1565 ureg->input[i].last,
1566 ureg->input[i].semantic_name,
1567 ureg->input[i].semantic_index,
1568 TGSI_WRITEMASK_XYZW,
1569 ureg->input[i].array_id);
1570 }
1571 }
1572 else {
1573 for (i = 0; i < ureg->nr_inputs; i++) {
1574 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1575 emit_decl_semantic(ureg,
1576 TGSI_FILE_INPUT,
1577 j, j,
1578 ureg->input[i].semantic_name,
1579 ureg->input[i].semantic_index +
1580 (j - ureg->input[i].first),
1581 TGSI_WRITEMASK_XYZW, 0);
1582 }
1583 }
1584 }
1585 }
1586
1587 for (i = 0; i < ureg->nr_system_values; i++) {
1588 emit_decl_semantic(ureg,
1589 TGSI_FILE_SYSTEM_VALUE,
1590 ureg->system_value[i].index,
1591 ureg->system_value[i].index,
1592 ureg->system_value[i].semantic_name,
1593 ureg->system_value[i].semantic_index,
1594 TGSI_WRITEMASK_XYZW, 0);
1595 }
1596
1597 if (ureg->supports_any_inout_decl_range) {
1598 for (i = 0; i < ureg->nr_outputs; i++) {
1599 emit_decl_semantic(ureg,
1600 TGSI_FILE_OUTPUT,
1601 ureg->output[i].first,
1602 ureg->output[i].last,
1603 ureg->output[i].semantic_name,
1604 ureg->output[i].semantic_index,
1605 ureg->output[i].usage_mask,
1606 ureg->output[i].array_id);
1607 }
1608 }
1609 else {
1610 for (i = 0; i < ureg->nr_outputs; i++) {
1611 for (j = ureg->output[i].first; j <= ureg->output[i].last; j++) {
1612 emit_decl_semantic(ureg,
1613 TGSI_FILE_OUTPUT,
1614 j, j,
1615 ureg->output[i].semantic_name,
1616 ureg->output[i].semantic_index +
1617 (j - ureg->output[i].first),
1618 ureg->output[i].usage_mask, 0);
1619 }
1620 }
1621 }
1622
1623 for (i = 0; i < ureg->nr_samplers; i++) {
1624 emit_decl_range( ureg,
1625 TGSI_FILE_SAMPLER,
1626 ureg->sampler[i].Index, 1 );
1627 }
1628
1629 for (i = 0; i < ureg->nr_sampler_views; i++) {
1630 emit_decl_sampler_view(ureg,
1631 ureg->sampler_view[i].index,
1632 ureg->sampler_view[i].target,
1633 ureg->sampler_view[i].return_type_x,
1634 ureg->sampler_view[i].return_type_y,
1635 ureg->sampler_view[i].return_type_z,
1636 ureg->sampler_view[i].return_type_w);
1637 }
1638
1639 if (ureg->const_decls.nr_constant_ranges) {
1640 for (i = 0; i < ureg->const_decls.nr_constant_ranges; i++) {
1641 emit_decl_range(ureg,
1642 TGSI_FILE_CONSTANT,
1643 ureg->const_decls.constant_range[i].first,
1644 ureg->const_decls.constant_range[i].last - ureg->const_decls.constant_range[i].first + 1);
1645 }
1646 }
1647
1648 for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
1649 struct const_decl *decl = &ureg->const_decls2D[i];
1650
1651 if (decl->nr_constant_ranges) {
1652 uint j;
1653
1654 for (j = 0; j < decl->nr_constant_ranges; j++) {
1655 emit_decl_range2D(ureg,
1656 TGSI_FILE_CONSTANT,
1657 decl->constant_range[j].first,
1658 decl->constant_range[j].last,
1659 i);
1660 }
1661 }
1662 }
1663
1664 if (ureg->nr_temps) {
1665 unsigned array = 0;
1666 for (i = 0; i < ureg->nr_temps;) {
1667 boolean local = util_bitmask_get(ureg->local_temps, i);
1668 unsigned first = i;
1669 i = util_bitmask_get_next_index(ureg->decl_temps, i + 1);
1670 if (i == UTIL_BITMASK_INVALID_INDEX)
1671 i = ureg->nr_temps;
1672
1673 if (array < ureg->nr_array_temps && ureg->array_temps[array] == first)
1674 emit_decl_temps( ureg, first, i - 1, local, ++array );
1675 else
1676 emit_decl_temps( ureg, first, i - 1, local, 0 );
1677 }
1678 }
1679
1680 if (ureg->nr_addrs) {
1681 emit_decl_range( ureg,
1682 TGSI_FILE_ADDRESS,
1683 0, ureg->nr_addrs );
1684 }
1685
1686 if (ureg->nr_preds) {
1687 emit_decl_range(ureg,
1688 TGSI_FILE_PREDICATE,
1689 0,
1690 ureg->nr_preds);
1691 }
1692
1693 for (i = 0; i < ureg->nr_immediates; i++) {
1694 emit_immediate( ureg,
1695 ureg->immediate[i].value.u,
1696 ureg->immediate[i].type );
1697 }
1698 }
1699
1700 /* Append the instruction tokens onto the declarations to build a
1701 * contiguous stream suitable to send to the driver.
1702 */
1703 static void copy_instructions( struct ureg_program *ureg )
1704 {
1705 unsigned nr_tokens = ureg->domain[DOMAIN_INSN].count;
1706 union tgsi_any_token *out = get_tokens( ureg,
1707 DOMAIN_DECL,
1708 nr_tokens );
1709
1710 memcpy(out,
1711 ureg->domain[DOMAIN_INSN].tokens,
1712 nr_tokens * sizeof out[0] );
1713 }
1714
1715
1716 static void
1717 fixup_header_size(struct ureg_program *ureg)
1718 {
1719 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_DECL, 0 );
1720
1721 out->header.BodySize = ureg->domain[DOMAIN_DECL].count - 2;
1722 }
1723
1724
1725 static void
1726 emit_header( struct ureg_program *ureg )
1727 {
1728 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
1729
1730 out[0].header.HeaderSize = 2;
1731 out[0].header.BodySize = 0;
1732
1733 out[1].processor.Processor = ureg->processor;
1734 out[1].processor.Padding = 0;
1735 }
1736
1737
1738 const struct tgsi_token *ureg_finalize( struct ureg_program *ureg )
1739 {
1740 const struct tgsi_token *tokens;
1741
1742 emit_header( ureg );
1743 emit_decls( ureg );
1744 copy_instructions( ureg );
1745 fixup_header_size( ureg );
1746
1747 if (ureg->domain[0].tokens == error_tokens ||
1748 ureg->domain[1].tokens == error_tokens) {
1749 debug_printf("%s: error in generated shader\n", __FUNCTION__);
1750 assert(0);
1751 return NULL;
1752 }
1753
1754 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
1755
1756 if (0) {
1757 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__,
1758 ureg->domain[DOMAIN_DECL].count);
1759 tgsi_dump( tokens, 0 );
1760 }
1761
1762 #ifdef DEBUG
1763 if (tokens && !tgsi_sanity_check(tokens)) {
1764 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
1765 tgsi_dump(tokens, 0);
1766 assert(0);
1767 }
1768 #endif
1769
1770
1771 return tokens;
1772 }
1773
1774
1775 void *ureg_create_shader( struct ureg_program *ureg,
1776 struct pipe_context *pipe,
1777 const struct pipe_stream_output_info *so )
1778 {
1779 struct pipe_shader_state state;
1780
1781 state.tokens = ureg_finalize(ureg);
1782 if (!state.tokens)
1783 return NULL;
1784
1785 if (so)
1786 state.stream_output = *so;
1787 else
1788 memset(&state.stream_output, 0, sizeof(state.stream_output));
1789
1790 switch (ureg->processor) {
1791 case TGSI_PROCESSOR_VERTEX:
1792 return pipe->create_vs_state(pipe, &state);
1793 case TGSI_PROCESSOR_TESS_CTRL:
1794 return pipe->create_tcs_state(pipe, &state);
1795 case TGSI_PROCESSOR_TESS_EVAL:
1796 return pipe->create_tes_state(pipe, &state);
1797 case TGSI_PROCESSOR_GEOMETRY:
1798 return pipe->create_gs_state(pipe, &state);
1799 case TGSI_PROCESSOR_FRAGMENT:
1800 return pipe->create_fs_state(pipe, &state);
1801 default:
1802 return NULL;
1803 }
1804 }
1805
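/* Usage sketch (illustrative, not part of the original file): the typical
 * lifecycle when building a CSO directly.  "pipe" is assumed to be the
 * caller's pipe_context; error handling is elided.
 *
 *    static const float red[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
 *    struct ureg_program *ureg = ureg_create(TGSI_PROCESSOR_FRAGMENT);
 *    struct ureg_dst out = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *    struct ureg_src imm = ureg_DECL_immediate(ureg, red, 4);
 *    void *fs;
 *
 *    ureg_insn(ureg, TGSI_OPCODE_MOV, &out, 1, &imm, 1);
 *    ureg_insn(ureg, TGSI_OPCODE_END, NULL, 0, NULL, 0);
 *
 *    fs = ureg_create_shader(ureg, pipe, NULL);
 *    ureg_destroy(ureg);
 */
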
1806
1807 const struct tgsi_token *ureg_get_tokens( struct ureg_program *ureg,
1808 unsigned *nr_tokens )
1809 {
1810 const struct tgsi_token *tokens;
1811
1812 ureg_finalize(ureg);
1813
1814 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
1815
1816 if (nr_tokens)
1817 *nr_tokens = ureg->domain[DOMAIN_DECL].count;
1818
1819 ureg->domain[DOMAIN_DECL].tokens = 0;
1820 ureg->domain[DOMAIN_DECL].size = 0;
1821 ureg->domain[DOMAIN_DECL].order = 0;
1822 ureg->domain[DOMAIN_DECL].count = 0;
1823
1824 return tokens;
1825 }
1826
1827
1828 void ureg_free_tokens( const struct tgsi_token *tokens )
1829 {
1830 FREE((struct tgsi_token *)tokens);
1831 }
1832
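/* Usage sketch (illustrative, not part of the original file): instead of
 * creating a CSO directly, a caller can take ownership of the finalized
 * token array and release it later with ureg_free_tokens().
 *
 *    unsigned nr_tokens;
 *    const struct tgsi_token *tokens = ureg_get_tokens(ureg, &nr_tokens);
 *
 *       ... hand the tokens to a driver or serialize them ...
 *
 *    ureg_free_tokens(tokens);
 *    ureg_destroy(ureg);
 */
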
1833
1834 struct ureg_program *
1835 ureg_create(unsigned processor)
1836 {
1837 return ureg_create_with_screen(processor, NULL);
1838 }
1839
1840
1841 struct ureg_program *
1842 ureg_create_with_screen(unsigned processor, struct pipe_screen *screen)
1843 {
1844 int i;
1845 struct ureg_program *ureg = CALLOC_STRUCT( ureg_program );
1846 if (ureg == NULL)
1847 goto no_ureg;
1848
1849 ureg->processor = processor;
1850 ureg->supports_any_inout_decl_range =
1851 screen &&
1852 screen->get_shader_param(screen,
1853 util_pipe_shader_from_tgsi_processor(processor),
1854 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE) != 0;
1855
1856 for (i = 0; i < Elements(ureg->properties); i++)
1857 ureg->properties[i] = ~0;
1858
1859 ureg->free_temps = util_bitmask_create();
1860 if (ureg->free_temps == NULL)
1861 goto no_free_temps;
1862
1863 ureg->local_temps = util_bitmask_create();
1864 if (ureg->local_temps == NULL)
1865 goto no_local_temps;
1866
1867 ureg->decl_temps = util_bitmask_create();
1868 if (ureg->decl_temps == NULL)
1869 goto no_decl_temps;
1870
1871 return ureg;
1872
1873 no_decl_temps:
1874 util_bitmask_destroy(ureg->local_temps);
1875 no_local_temps:
1876 util_bitmask_destroy(ureg->free_temps);
1877 no_free_temps:
1878 FREE(ureg);
1879 no_ureg:
1880 return NULL;
1881 }
1882
1883
1884 unsigned
1885 ureg_get_nr_outputs( const struct ureg_program *ureg )
1886 {
1887 if (!ureg)
1888 return 0;
1889 return ureg->nr_outputs;
1890 }
1891
1892
1893 void ureg_destroy( struct ureg_program *ureg )
1894 {
1895 unsigned i;
1896
1897 for (i = 0; i < Elements(ureg->domain); i++) {
1898 if (ureg->domain[i].tokens &&
1899 ureg->domain[i].tokens != error_tokens)
1900 FREE(ureg->domain[i].tokens);
1901 }
1902
1903 util_bitmask_destroy(ureg->free_temps);
1904 util_bitmask_destroy(ureg->local_temps);
1905 util_bitmask_destroy(ureg->decl_temps);
1906
1907 FREE(ureg);
1908 }