st/mesa,tgsi: use enum tgsi_opcode
[mesa.git] / src/gallium/auxiliary/tgsi/tgsi_ureg.c
1 /**************************************************************************
2 *
3 * Copyright 2009-2010 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
42
43 union tgsi_any_token {
44 struct tgsi_header header;
45 struct tgsi_processor processor;
46 struct tgsi_token token;
47 struct tgsi_property prop;
48 struct tgsi_property_data prop_data;
49 struct tgsi_declaration decl;
50 struct tgsi_declaration_range decl_range;
51 struct tgsi_declaration_dimension decl_dim;
52 struct tgsi_declaration_interp decl_interp;
53 struct tgsi_declaration_image decl_image;
54 struct tgsi_declaration_semantic decl_semantic;
55 struct tgsi_declaration_sampler_view decl_sampler_view;
56 struct tgsi_declaration_array array;
57 struct tgsi_immediate imm;
58 union tgsi_immediate_data imm_data;
59 struct tgsi_instruction insn;
60 struct tgsi_instruction_label insn_label;
61 struct tgsi_instruction_texture insn_texture;
62 struct tgsi_instruction_memory insn_memory;
63 struct tgsi_texture_offset insn_texture_offset;
64 struct tgsi_src_register src;
65 struct tgsi_ind_register ind;
66 struct tgsi_dimension dim;
67 struct tgsi_dst_register dst;
68 unsigned value;
69 };
70
71
72 struct ureg_tokens {
73 union tgsi_any_token *tokens;
74 unsigned size;
75 unsigned order;
76 unsigned count;
77 };
78
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
87
88 struct const_decl {
89 struct {
90 unsigned first;
91 unsigned last;
92 } constant_range[UREG_MAX_CONSTANT_RANGE];
93 unsigned nr_constant_ranges;
94 };
95
96 struct hw_atomic_decl {
97 struct {
98 unsigned first;
99 unsigned last;
100 unsigned array_id;
101 } hw_atomic_range[UREG_MAX_HW_ATOMIC_RANGE];
102 unsigned nr_hw_atomic_ranges;
103 };
104
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
107
108 struct ureg_program
109 {
110 enum pipe_shader_type processor;
111 bool supports_any_inout_decl_range;
112 int next_shader_processor;
113
114 struct {
115 enum tgsi_semantic semantic_name;
116 unsigned semantic_index;
117 enum tgsi_interpolate_mode interp;
118 unsigned char cylindrical_wrap;
119 unsigned char usage_mask;
120 enum tgsi_interpolate_loc interp_location;
121 unsigned first;
122 unsigned last;
123 unsigned array_id;
124 } input[UREG_MAX_INPUT];
125 unsigned nr_inputs, nr_input_regs;
126
127 unsigned vs_inputs[PIPE_MAX_ATTRIBS/32];
128
129 struct {
130 enum tgsi_semantic semantic_name;
131 unsigned semantic_index;
132 } system_value[UREG_MAX_SYSTEM_VALUE];
133 unsigned nr_system_values;
134
135 struct {
136 enum tgsi_semantic semantic_name;
137 unsigned semantic_index;
138 unsigned streams;
139 unsigned usage_mask; /* = TGSI_WRITEMASK_* */
140 unsigned first;
141 unsigned last;
142 unsigned array_id;
143 } output[UREG_MAX_OUTPUT];
144 unsigned nr_outputs, nr_output_regs;
145
146 struct {
147 union {
148 float f[4];
149 unsigned u[4];
150 int i[4];
151 } value;
152 unsigned nr;
153 unsigned type;
154 } immediate[UREG_MAX_IMMEDIATE];
155 unsigned nr_immediates;
156
157 struct ureg_src sampler[PIPE_MAX_SAMPLERS];
158 unsigned nr_samplers;
159
160 struct {
161 unsigned index;
162 enum tgsi_texture_type target;
163 enum tgsi_return_type return_type_x;
164 enum tgsi_return_type return_type_y;
165 enum tgsi_return_type return_type_z;
166 enum tgsi_return_type return_type_w;
167 } sampler_view[PIPE_MAX_SHADER_SAMPLER_VIEWS];
168 unsigned nr_sampler_views;
169
170 struct {
171 unsigned index;
172 enum tgsi_texture_type target;
173 unsigned format;
174 boolean wr;
175 boolean raw;
176 } image[PIPE_MAX_SHADER_IMAGES];
177 unsigned nr_images;
178
179 struct {
180 unsigned index;
181 bool atomic;
182 } buffer[PIPE_MAX_SHADER_BUFFERS];
183 unsigned nr_buffers;
184
185 struct util_bitmask *free_temps;
186 struct util_bitmask *local_temps;
187 struct util_bitmask *decl_temps;
188 unsigned nr_temps;
189
190 unsigned array_temps[UREG_MAX_ARRAY_TEMPS];
191 unsigned nr_array_temps;
192
193 struct const_decl const_decls[PIPE_MAX_CONSTANT_BUFFERS];
194
195 struct hw_atomic_decl hw_atomic_decls[PIPE_MAX_HW_ATOMIC_BUFFERS];
196
197 unsigned properties[TGSI_PROPERTY_COUNT];
198
199 unsigned nr_addrs;
200 unsigned nr_instructions;
201
202 struct ureg_tokens domain[2];
203
204 bool use_memory[TGSI_MEMORY_TYPE_COUNT];
205 };
206
207 static union tgsi_any_token error_tokens[32];
208
209 static void tokens_error( struct ureg_tokens *tokens )
210 {
211 if (tokens->tokens && tokens->tokens != error_tokens)
212 FREE(tokens->tokens);
213
214 tokens->tokens = error_tokens;
215 tokens->size = ARRAY_SIZE(error_tokens);
216 tokens->count = 0;
217 }
218
219
220 static void tokens_expand( struct ureg_tokens *tokens,
221 unsigned count )
222 {
223 unsigned old_size = tokens->size * sizeof(unsigned);
224
225 if (tokens->tokens == error_tokens) {
226 return;
227 }
228
229 while (tokens->count + count > tokens->size) {
230 tokens->size = (1 << ++tokens->order);
231 }
232
233 tokens->tokens = REALLOC(tokens->tokens,
234 old_size,
235 tokens->size * sizeof(unsigned));
236 if (tokens->tokens == NULL) {
237 tokens_error(tokens);
238 }
239 }
240
241 static void set_bad( struct ureg_program *ureg )
242 {
243 tokens_error(&ureg->domain[0]);
244 }
245
246
247
248 static union tgsi_any_token *get_tokens( struct ureg_program *ureg,
249 unsigned domain,
250 unsigned count )
251 {
252 struct ureg_tokens *tokens = &ureg->domain[domain];
253 union tgsi_any_token *result;
254
255 if (tokens->count + count > tokens->size)
256 tokens_expand(tokens, count);
257
258 result = &tokens->tokens[tokens->count];
259 tokens->count += count;
260 return result;
261 }
262
263
264 static union tgsi_any_token *retrieve_token( struct ureg_program *ureg,
265 unsigned domain,
266 unsigned nr )
267 {
268 if (ureg->domain[domain].tokens == error_tokens)
269 return &error_tokens[0];
270
271 return &ureg->domain[domain].tokens[nr];
272 }
273
274
275 void
276 ureg_property(struct ureg_program *ureg, unsigned name, unsigned value)
277 {
278 assert(name < ARRAY_SIZE(ureg->properties));
279 ureg->properties[name] = value;
280 }
281
282 struct ureg_src
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program *ureg,
284 enum tgsi_semantic semantic_name,
285 unsigned semantic_index,
286 enum tgsi_interpolate_mode interp_mode,
287 unsigned cylindrical_wrap,
288 enum tgsi_interpolate_loc interp_location,
289 unsigned index,
290 unsigned usage_mask,
291 unsigned array_id,
292 unsigned array_size)
293 {
294 unsigned i;
295
296 assert(usage_mask != 0);
297 assert(usage_mask <= TGSI_WRITEMASK_XYZW);
298
299 for (i = 0; i < ureg->nr_inputs; i++) {
300 if (ureg->input[i].semantic_name == semantic_name &&
301 ureg->input[i].semantic_index == semantic_index) {
302 assert(ureg->input[i].interp == interp_mode);
303 assert(ureg->input[i].cylindrical_wrap == cylindrical_wrap);
304 assert(ureg->input[i].interp_location == interp_location);
305 if (ureg->input[i].array_id == array_id) {
306 ureg->input[i].usage_mask |= usage_mask;
307 goto out;
308 }
309 assert((ureg->input[i].usage_mask & usage_mask) == 0);
310 }
311 }
312
313 if (ureg->nr_inputs < UREG_MAX_INPUT) {
314 assert(array_size >= 1);
315 ureg->input[i].semantic_name = semantic_name;
316 ureg->input[i].semantic_index = semantic_index;
317 ureg->input[i].interp = interp_mode;
318 ureg->input[i].cylindrical_wrap = cylindrical_wrap;
319 ureg->input[i].interp_location = interp_location;
320 ureg->input[i].first = index;
321 ureg->input[i].last = index + array_size - 1;
322 ureg->input[i].array_id = array_id;
323 ureg->input[i].usage_mask = usage_mask;
324 ureg->nr_input_regs = MAX2(ureg->nr_input_regs, index + array_size);
325 ureg->nr_inputs++;
326 } else {
327 set_bad(ureg);
328 }
329
330 out:
331 return ureg_src_array_register(TGSI_FILE_INPUT, ureg->input[i].first,
332 array_id);
333 }
334
335 struct ureg_src
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program *ureg,
337 enum tgsi_semantic semantic_name,
338 unsigned semantic_index,
339 enum tgsi_interpolate_mode interp_mode,
340 unsigned cylindrical_wrap,
341 enum tgsi_interpolate_loc interp_location,
342 unsigned array_id,
343 unsigned array_size)
344 {
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
346 semantic_name, semantic_index, interp_mode,
347 cylindrical_wrap, interp_location,
348 ureg->nr_input_regs, TGSI_WRITEMASK_XYZW, array_id, array_size);
349 }
350
351
352 struct ureg_src
353 ureg_DECL_vs_input( struct ureg_program *ureg,
354 unsigned index )
355 {
356 assert(ureg->processor == PIPE_SHADER_VERTEX);
357 assert(index / 32 < ARRAY_SIZE(ureg->vs_inputs));
358
359 ureg->vs_inputs[index/32] |= 1 << (index % 32);
360 return ureg_src_register( TGSI_FILE_INPUT, index );
361 }
362
363
364 struct ureg_src
365 ureg_DECL_input_layout(struct ureg_program *ureg,
366 enum tgsi_semantic semantic_name,
367 unsigned semantic_index,
368 unsigned index,
369 unsigned usage_mask,
370 unsigned array_id,
371 unsigned array_size)
372 {
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
374 semantic_name, semantic_index,
375 TGSI_INTERPOLATE_CONSTANT, 0, TGSI_INTERPOLATE_LOC_CENTER,
376 index, usage_mask, array_id, array_size);
377 }
378
379
380 struct ureg_src
381 ureg_DECL_input(struct ureg_program *ureg,
382 enum tgsi_semantic semantic_name,
383 unsigned semantic_index,
384 unsigned array_id,
385 unsigned array_size)
386 {
387 return ureg_DECL_fs_input_cyl_centroid(ureg, semantic_name, semantic_index,
388 TGSI_INTERPOLATE_CONSTANT, 0,
389 TGSI_INTERPOLATE_LOC_CENTER,
390 array_id, array_size);
391 }
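/* Usage sketch (illustrative only, not part of this file): declaring a
 * perspective-interpolated COLOR input for a fragment shader with the
 * helper above; "ureg" is assumed to come from ureg_create(PIPE_SHADER_FRAGMENT).
 *
 *    struct ureg_src color =
 *       ureg_DECL_fs_input_cyl_centroid(ureg,
 *                                       TGSI_SEMANTIC_COLOR, 0,
 *                                       TGSI_INTERPOLATE_PERSPECTIVE,
 *                                       0,
 *                                       TGSI_INTERPOLATE_LOC_CENTER,
 *                                       0, 1);
 *
 * Repeated declarations of the same semantic are merged by the loop in
 * ureg_DECL_fs_input_cyl_centroid_layout(), so callers can declare inputs
 * lazily as they are first used.
 */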
392
393
394 struct ureg_src
395 ureg_DECL_system_value(struct ureg_program *ureg,
396 enum tgsi_semantic semantic_name,
397 unsigned semantic_index)
398 {
399 unsigned i;
400
401 for (i = 0; i < ureg->nr_system_values; i++) {
402 if (ureg->system_value[i].semantic_name == semantic_name &&
403 ureg->system_value[i].semantic_index == semantic_index) {
404 goto out;
405 }
406 }
407
408 if (ureg->nr_system_values < UREG_MAX_SYSTEM_VALUE) {
409 ureg->system_value[ureg->nr_system_values].semantic_name = semantic_name;
410 ureg->system_value[ureg->nr_system_values].semantic_index = semantic_index;
411 i = ureg->nr_system_values;
412 ureg->nr_system_values++;
413 } else {
414 set_bad(ureg);
415 }
416
417 out:
418 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE, i);
419 }
420
421
422 struct ureg_dst
423 ureg_DECL_output_layout(struct ureg_program *ureg,
424 enum tgsi_semantic semantic_name,
425 unsigned semantic_index,
426 unsigned streams,
427 unsigned index,
428 unsigned usage_mask,
429 unsigned array_id,
430 unsigned array_size)
431 {
432 unsigned i;
433
434 assert(usage_mask != 0);
435 assert(!(streams & 0x03) || (usage_mask & 1));
436 assert(!(streams & 0x0c) || (usage_mask & 2));
437 assert(!(streams & 0x30) || (usage_mask & 4));
438 assert(!(streams & 0xc0) || (usage_mask & 8));
439
440 for (i = 0; i < ureg->nr_outputs; i++) {
441 if (ureg->output[i].semantic_name == semantic_name &&
442 ureg->output[i].semantic_index == semantic_index) {
443 if (ureg->output[i].array_id == array_id) {
444 ureg->output[i].usage_mask |= usage_mask;
445 goto out;
446 }
447 assert((ureg->output[i].usage_mask & usage_mask) == 0);
448 }
449 }
450
451 if (ureg->nr_outputs < UREG_MAX_OUTPUT) {
452 ureg->output[i].semantic_name = semantic_name;
453 ureg->output[i].semantic_index = semantic_index;
454 ureg->output[i].usage_mask = usage_mask;
455 ureg->output[i].first = index;
456 ureg->output[i].last = index + array_size - 1;
457 ureg->output[i].array_id = array_id;
458 ureg->nr_output_regs = MAX2(ureg->nr_output_regs, index + array_size);
459 ureg->nr_outputs++;
460 }
461 else {
462 set_bad( ureg );
463 i = 0;
464 }
465
466 out:
467 ureg->output[i].streams |= streams;
468
469 return ureg_dst_array_register(TGSI_FILE_OUTPUT, ureg->output[i].first,
470 array_id);
471 }
472
473
474 struct ureg_dst
475 ureg_DECL_output_masked(struct ureg_program *ureg,
476 unsigned name,
477 unsigned index,
478 unsigned usage_mask,
479 unsigned array_id,
480 unsigned array_size)
481 {
482 return ureg_DECL_output_layout(ureg, name, index, 0,
483 ureg->nr_output_regs, usage_mask, array_id, array_size);
484 }
485
486
487 struct ureg_dst
488 ureg_DECL_output(struct ureg_program *ureg,
489 enum tgsi_semantic name,
490 unsigned index)
491 {
492 return ureg_DECL_output_masked(ureg, name, index, TGSI_WRITEMASK_XYZW,
493 0, 1);
494 }
495
496 struct ureg_dst
497 ureg_DECL_output_array(struct ureg_program *ureg,
498 enum tgsi_semantic semantic_name,
499 unsigned semantic_index,
500 unsigned array_id,
501 unsigned array_size)
502 {
503 return ureg_DECL_output_masked(ureg, semantic_name, semantic_index,
504 TGSI_WRITEMASK_XYZW,
505 array_id, array_size);
506 }
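/* Usage sketch (illustrative only, not part of this file): a vertex shader
 * would typically declare its outputs like this and then write them with
 * ordinary instructions:
 *
 *    struct ureg_dst out_pos =
 *       ureg_DECL_output(ureg, TGSI_SEMANTIC_POSITION, 0);
 *    struct ureg_dst out_col =
 *       ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *
 * ureg_DECL_output_masked() and ureg_DECL_output_layout() are the lower
 * level variants for when only some components are written or an explicit
 * register index / array is required.
 */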
507
508
509 /* Declare a range of constant registers.  Keep track of which have been
510 * referred to so that we can emit decls later.
511 *
512 * Constant operands declared with this function must be addressed
513 * with a two-dimensional index.
514 *
515 * There is nothing in this code to bind this constant to any tracked
516 * value or manage any constant_buffer contents -- that's the
517 * responsibility of the calling code.
518 */
519 void
520 ureg_DECL_constant2D(struct ureg_program *ureg,
521 unsigned first,
522 unsigned last,
523 unsigned index2D)
524 {
525 struct const_decl *decl = &ureg->const_decls[index2D];
526
527 assert(index2D < PIPE_MAX_CONSTANT_BUFFERS);
528
529 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
530 uint i = decl->nr_constant_ranges++;
531
532 decl->constant_range[i].first = first;
533 decl->constant_range[i].last = last;
534 }
535 }
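/* Usage sketch (illustrative only, not part of this file): reserving a range
 * in constant buffer 1 and addressing one element of it with a 2D index
 * built from ureg_src_register() + ureg_src_dimension():
 *
 *    ureg_DECL_constant2D(ureg, 0, 15, 1);
 *    struct ureg_src c3 =
 *       ureg_src_dimension(ureg_src_register(TGSI_FILE_CONSTANT, 3), 1);
 *
 * The actual constant buffer contents are bound elsewhere by the caller;
 * this code only records the range so that emit_decls() can emit the
 * declaration later.
 */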
536
537
538 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
539 *
540 * Constant operands declared with this function must be addressed
541 * with a one-dimensional index.
542 */
543 struct ureg_src
544 ureg_DECL_constant(struct ureg_program *ureg,
545 unsigned index)
546 {
547 struct const_decl *decl = &ureg->const_decls[0];
548 unsigned minconst = index, maxconst = index;
549 unsigned i;
550
551 /* Inside existing range?
552 */
553 for (i = 0; i < decl->nr_constant_ranges; i++) {
554 if (decl->constant_range[i].first <= index &&
555 decl->constant_range[i].last >= index) {
556 goto out;
557 }
558 }
559
560 /* Extend existing range?
561 */
562 for (i = 0; i < decl->nr_constant_ranges; i++) {
563 if (decl->constant_range[i].last == index - 1) {
564 decl->constant_range[i].last = index;
565 goto out;
566 }
567
568 if (decl->constant_range[i].first == index + 1) {
569 decl->constant_range[i].first = index;
570 goto out;
571 }
572
573 minconst = MIN2(minconst, decl->constant_range[i].first);
574 maxconst = MAX2(maxconst, decl->constant_range[i].last);
575 }
576
577 /* Create new range?
578 */
579 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
580 i = decl->nr_constant_ranges++;
581 decl->constant_range[i].first = index;
582 decl->constant_range[i].last = index;
583 goto out;
584 }
585
586 /* Collapse all ranges down to one:
587 */
588 i = 0;
589 decl->constant_range[0].first = minconst;
590 decl->constant_range[0].last = maxconst;
591 decl->nr_constant_ranges = 1;
592
593 out:
594 assert(i < decl->nr_constant_ranges);
595 assert(decl->constant_range[i].first <= index);
596 assert(decl->constant_range[i].last >= index);
597
598 struct ureg_src src = ureg_src_register(TGSI_FILE_CONSTANT, index);
599 return ureg_src_dimension(src, 0);
600 }
601
602
603 /* Declare a hw atomic register range.  Keep track of which have been
604 * referred to so that we can emit decls later.
605 */
606 void
607 ureg_DECL_hw_atomic(struct ureg_program *ureg,
608 unsigned first,
609 unsigned last,
610 unsigned buffer_id,
611 unsigned array_id)
612 {
613 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[buffer_id];
614
615 if (decl->nr_hw_atomic_ranges < UREG_MAX_HW_ATOMIC_RANGE) {
616 uint i = decl->nr_hw_atomic_ranges++;
617
618 decl->hw_atomic_range[i].first = first;
619 decl->hw_atomic_range[i].last = last;
620 decl->hw_atomic_range[i].array_id = array_id;
621 } else {
622 set_bad(ureg);
623 }
624 }
625
626 static struct ureg_dst alloc_temporary( struct ureg_program *ureg,
627 boolean local )
628 {
629 unsigned i;
630
631 /* Look for a released temporary.
632 */
633 for (i = util_bitmask_get_first_index(ureg->free_temps);
634 i != UTIL_BITMASK_INVALID_INDEX;
635 i = util_bitmask_get_next_index(ureg->free_temps, i + 1)) {
636 if (util_bitmask_get(ureg->local_temps, i) == local)
637 break;
638 }
639
640 /* Or allocate a new one.
641 */
642 if (i == UTIL_BITMASK_INVALID_INDEX) {
643 i = ureg->nr_temps++;
644
645 if (local)
646 util_bitmask_set(ureg->local_temps, i);
647
648 /* Start a new declaration when the local flag changes */
649 if (!i || util_bitmask_get(ureg->local_temps, i - 1) != local)
650 util_bitmask_set(ureg->decl_temps, i);
651 }
652
653 util_bitmask_clear(ureg->free_temps, i);
654
655 return ureg_dst_register( TGSI_FILE_TEMPORARY, i );
656 }
657
658 struct ureg_dst ureg_DECL_temporary( struct ureg_program *ureg )
659 {
660 return alloc_temporary(ureg, FALSE);
661 }
662
663 struct ureg_dst ureg_DECL_local_temporary( struct ureg_program *ureg )
664 {
665 return alloc_temporary(ureg, TRUE);
666 }
667
668 struct ureg_dst ureg_DECL_array_temporary( struct ureg_program *ureg,
669 unsigned size,
670 boolean local )
671 {
672 unsigned i = ureg->nr_temps;
673 struct ureg_dst dst = ureg_dst_register( TGSI_FILE_TEMPORARY, i );
674
675 if (local)
676 util_bitmask_set(ureg->local_temps, i);
677
678 /* Always start a new declaration at the start */
679 util_bitmask_set(ureg->decl_temps, i);
680
681 ureg->nr_temps += size;
682
683 /* and also at the end of the array */
684 util_bitmask_set(ureg->decl_temps, ureg->nr_temps);
685
686 if (ureg->nr_array_temps < UREG_MAX_ARRAY_TEMPS) {
687 ureg->array_temps[ureg->nr_array_temps++] = i;
688 dst.ArrayID = ureg->nr_array_temps;
689 }
690
691 return dst;
692 }
693
694 void ureg_release_temporary( struct ureg_program *ureg,
695 struct ureg_dst tmp )
696 {
697 if(tmp.File == TGSI_FILE_TEMPORARY)
698 util_bitmask_set(ureg->free_temps, tmp.Index);
699 }
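/* Usage sketch (illustrative only, not part of this file): temporaries are
 * allocated on demand and can be returned to the pool for reuse:
 *
 *    struct ureg_dst tmp = ureg_DECL_temporary(ureg);
 *    ... emit instructions writing/reading tmp ...
 *    ureg_release_temporary(ureg, tmp);
 *
 * Released registers are handed out again by alloc_temporary(), so this
 * pattern keeps the TEMP file small.
 */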
700
701
702 /* Allocate a new address register.
703 */
704 struct ureg_dst ureg_DECL_address( struct ureg_program *ureg )
705 {
706 if (ureg->nr_addrs < UREG_MAX_ADDR)
707 return ureg_dst_register( TGSI_FILE_ADDRESS, ureg->nr_addrs++ );
708
709 assert( 0 );
710 return ureg_dst_register( TGSI_FILE_ADDRESS, 0 );
711 }
712
713 /* Allocate a new sampler.
714 */
715 struct ureg_src ureg_DECL_sampler( struct ureg_program *ureg,
716 unsigned nr )
717 {
718 unsigned i;
719
720 for (i = 0; i < ureg->nr_samplers; i++)
721 if (ureg->sampler[i].Index == nr)
722 return ureg->sampler[i];
723
724 if (i < PIPE_MAX_SAMPLERS) {
725 ureg->sampler[i] = ureg_src_register( TGSI_FILE_SAMPLER, nr );
726 ureg->nr_samplers++;
727 return ureg->sampler[i];
728 }
729
730 assert( 0 );
731 return ureg->sampler[0];
732 }
733
734 /*
735 * Allocate a new shader sampler view.
736 */
737 struct ureg_src
738 ureg_DECL_sampler_view(struct ureg_program *ureg,
739 unsigned index,
740 enum tgsi_texture_type target,
741 enum tgsi_return_type return_type_x,
742 enum tgsi_return_type return_type_y,
743 enum tgsi_return_type return_type_z,
744 enum tgsi_return_type return_type_w)
745 {
746 struct ureg_src reg = ureg_src_register(TGSI_FILE_SAMPLER_VIEW, index);
747 uint i;
748
749 for (i = 0; i < ureg->nr_sampler_views; i++) {
750 if (ureg->sampler_view[i].index == index) {
751 return reg;
752 }
753 }
754
755 if (i < PIPE_MAX_SHADER_SAMPLER_VIEWS) {
756 ureg->sampler_view[i].index = index;
757 ureg->sampler_view[i].target = target;
758 ureg->sampler_view[i].return_type_x = return_type_x;
759 ureg->sampler_view[i].return_type_y = return_type_y;
760 ureg->sampler_view[i].return_type_z = return_type_z;
761 ureg->sampler_view[i].return_type_w = return_type_w;
762 ureg->nr_sampler_views++;
763 return reg;
764 }
765
766 assert(0);
767 return reg;
768 }
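/* Usage sketch (illustrative only, not part of this file): a texturing
 * shader normally declares a sampler and a matching sampler view with the
 * same index:
 *
 *    struct ureg_src samp = ureg_DECL_sampler(ureg, 0);
 *    struct ureg_src view =
 *       ureg_DECL_sampler_view(ureg, 0, TGSI_TEXTURE_2D,
 *                              TGSI_RETURN_TYPE_FLOAT,
 *                              TGSI_RETURN_TYPE_FLOAT,
 *                              TGSI_RETURN_TYPE_FLOAT,
 *                              TGSI_RETURN_TYPE_FLOAT);
 *
 * "samp" is then passed as the last source operand of texture instructions
 * (see ureg_tex_insn() below).
 */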
769
770 /* Allocate a new image.
771 */
772 struct ureg_src
773 ureg_DECL_image(struct ureg_program *ureg,
774 unsigned index,
775 enum tgsi_texture_type target,
776 unsigned format,
777 boolean wr,
778 boolean raw)
779 {
780 struct ureg_src reg = ureg_src_register(TGSI_FILE_IMAGE, index);
781 unsigned i;
782
783 for (i = 0; i < ureg->nr_images; i++)
784 if (ureg->image[i].index == index)
785 return reg;
786
787 if (i < PIPE_MAX_SHADER_IMAGES) {
788 ureg->image[i].index = index;
789 ureg->image[i].target = target;
790 ureg->image[i].wr = wr;
791 ureg->image[i].raw = raw;
792 ureg->image[i].format = format;
793 ureg->nr_images++;
794 return reg;
795 }
796
797 assert(0);
798 return reg;
799 }
800
801 /* Allocate a new buffer.
802 */
803 struct ureg_src ureg_DECL_buffer(struct ureg_program *ureg, unsigned nr,
804 bool atomic)
805 {
806 struct ureg_src reg = ureg_src_register(TGSI_FILE_BUFFER, nr);
807 unsigned i;
808
809 for (i = 0; i < ureg->nr_buffers; i++)
810 if (ureg->buffer[i].index == nr)
811 return reg;
812
813 if (i < PIPE_MAX_SHADER_BUFFERS) {
814 ureg->buffer[i].index = nr;
815 ureg->buffer[i].atomic = atomic;
816 ureg->nr_buffers++;
817 return reg;
818 }
819
820 assert(0);
821 return reg;
822 }
823
824 /* Allocate a memory area.
825 */
826 struct ureg_src ureg_DECL_memory(struct ureg_program *ureg,
827 unsigned memory_type)
828 {
829 struct ureg_src reg = ureg_src_register(TGSI_FILE_MEMORY, memory_type);
830
831 ureg->use_memory[memory_type] = true;
832 return reg;
833 }
834
835 static int
836 match_or_expand_immediate64( const unsigned *v,
837 int type,
838 unsigned nr,
839 unsigned *v2,
840 unsigned *pnr2,
841 unsigned *swizzle )
842 {
843 unsigned nr2 = *pnr2;
844 unsigned i, j;
845 *swizzle = 0;
846
847 for (i = 0; i < nr; i += 2) {
848 boolean found = FALSE;
849
850 for (j = 0; j < nr2 && !found; j += 2) {
851 if (v[i] == v2[j] && v[i + 1] == v2[j + 1]) {
852 *swizzle |= (j << (i * 2)) | ((j + 1) << ((i + 1) * 2));
853 found = TRUE;
854 }
855 }
856 if (!found) {
857 if ((nr2) >= 4) {
858 return FALSE;
859 }
860
861 v2[nr2] = v[i];
862 v2[nr2 + 1] = v[i + 1];
863
864 *swizzle |= (nr2 << (i * 2)) | ((nr2 + 1) << ((i + 1) * 2));
865 nr2 += 2;
866 }
867 }
868
869 /* Only commit the expanded size (*pnr2) once matching has fully succeeded.
870 */
871 *pnr2 = nr2;
872 return TRUE;
873 }
874
875 static int
876 match_or_expand_immediate( const unsigned *v,
877 int type,
878 unsigned nr,
879 unsigned *v2,
880 unsigned *pnr2,
881 unsigned *swizzle )
882 {
883 unsigned nr2 = *pnr2;
884 unsigned i, j;
885
886 if (type == TGSI_IMM_FLOAT64 ||
887 type == TGSI_IMM_UINT64 ||
888 type == TGSI_IMM_INT64)
889 return match_or_expand_immediate64(v, type, nr, v2, pnr2, swizzle);
890
891 *swizzle = 0;
892
893 for (i = 0; i < nr; i++) {
894 boolean found = FALSE;
895
896 for (j = 0; j < nr2 && !found; j++) {
897 if (v[i] == v2[j]) {
898 *swizzle |= j << (i * 2);
899 found = TRUE;
900 }
901 }
902
903 if (!found) {
904 if (nr2 >= 4) {
905 return FALSE;
906 }
907
908 v2[nr2] = v[i];
909 *swizzle |= nr2 << (i * 2);
910 nr2++;
911 }
912 }
913
914 /* Only commit the expanded size (*pnr2) once matching has fully succeeded.
915 */
916 *pnr2 = nr2;
917 return TRUE;
918 }
919
920
921 static struct ureg_src
922 decl_immediate( struct ureg_program *ureg,
923 const unsigned *v,
924 unsigned nr,
925 unsigned type )
926 {
927 unsigned i, j;
928 unsigned swizzle = 0;
929
930 /* Could do a first pass where we examine all existing immediates
931 * without expanding.
932 */
933
934 for (i = 0; i < ureg->nr_immediates; i++) {
935 if (ureg->immediate[i].type != type) {
936 continue;
937 }
938 if (match_or_expand_immediate(v,
939 type,
940 nr,
941 ureg->immediate[i].value.u,
942 &ureg->immediate[i].nr,
943 &swizzle)) {
944 goto out;
945 }
946 }
947
948 if (ureg->nr_immediates < UREG_MAX_IMMEDIATE) {
949 i = ureg->nr_immediates++;
950 ureg->immediate[i].type = type;
951 if (match_or_expand_immediate(v,
952 type,
953 nr,
954 ureg->immediate[i].value.u,
955 &ureg->immediate[i].nr,
956 &swizzle)) {
957 goto out;
958 }
959 }
960
961 set_bad(ureg);
962
963 out:
964 /* Make sure that all referenced elements are from this immediate.
965 * Has the effect of making size-one immediates into scalars.
966 */
967 if (type == TGSI_IMM_FLOAT64 ||
968 type == TGSI_IMM_UINT64 ||
969 type == TGSI_IMM_INT64) {
970 for (j = nr; j < 4; j+=2) {
971 swizzle |= (swizzle & 0xf) << (j * 2);
972 }
973 } else {
974 for (j = nr; j < 4; j++) {
975 swizzle |= (swizzle & 0x3) << (j * 2);
976 }
977 }
978 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE, i),
979 (swizzle >> 0) & 0x3,
980 (swizzle >> 2) & 0x3,
981 (swizzle >> 4) & 0x3,
982 (swizzle >> 6) & 0x3);
983 }
984
985
986 struct ureg_src
987 ureg_DECL_immediate( struct ureg_program *ureg,
988 const float *v,
989 unsigned nr )
990 {
991 union {
992 float f[4];
993 unsigned u[4];
994 } fu;
995 unsigned int i;
996
997 for (i = 0; i < nr; i++) {
998 fu.f[i] = v[i];
999 }
1000
1001 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT32);
1002 }
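/* Usage sketch (illustrative only, not part of this file): immediates are
 * deduplicated and packed into vec4 slots, and the returned source is
 * pre-swizzled so that every referenced component comes from the declared
 * values:
 *
 *    static const float half_one[2] = { 0.5f, 1.0f };
 *    struct ureg_src imm = ureg_DECL_immediate(ureg, half_one, 2);
 *
 * Declaring { 1.0f, 0.5f } afterwards would reuse the same immediate slot
 * with a different swizzle rather than allocating a new one.
 */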
1003
1004 struct ureg_src
1005 ureg_DECL_immediate_f64( struct ureg_program *ureg,
1006 const double *v,
1007 unsigned nr )
1008 {
1009 union {
1010 unsigned u[4];
1011 double d[2];
1012 } fu;
1013 unsigned int i;
1014
1015 assert((nr / 2) < 3);
1016 for (i = 0; i < nr / 2; i++) {
1017 fu.d[i] = v[i];
1018 }
1019
1020 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT64);
1021 }
1022
1023 struct ureg_src
1024 ureg_DECL_immediate_uint( struct ureg_program *ureg,
1025 const unsigned *v,
1026 unsigned nr )
1027 {
1028 return decl_immediate(ureg, v, nr, TGSI_IMM_UINT32);
1029 }
1030
1031
1032 struct ureg_src
1033 ureg_DECL_immediate_block_uint( struct ureg_program *ureg,
1034 const unsigned *v,
1035 unsigned nr )
1036 {
1037 uint index;
1038 uint i;
1039
1040 if (ureg->nr_immediates + (nr + 3) / 4 > UREG_MAX_IMMEDIATE) {
1041 set_bad(ureg);
1042 return ureg_src_register(TGSI_FILE_IMMEDIATE, 0);
1043 }
1044
1045 index = ureg->nr_immediates;
1046 ureg->nr_immediates += (nr + 3) / 4;
1047
1048 for (i = index; i < ureg->nr_immediates; i++) {
1049 ureg->immediate[i].type = TGSI_IMM_UINT32;
1050 ureg->immediate[i].nr = nr > 4 ? 4 : nr;
1051 memcpy(ureg->immediate[i].value.u,
1052 &v[(i - index) * 4],
1053 ureg->immediate[i].nr * sizeof(uint));
1054 nr -= 4;
1055 }
1056
1057 return ureg_src_register(TGSI_FILE_IMMEDIATE, index);
1058 }
1059
1060
1061 struct ureg_src
1062 ureg_DECL_immediate_int( struct ureg_program *ureg,
1063 const int *v,
1064 unsigned nr )
1065 {
1066 return decl_immediate(ureg, (const unsigned *)v, nr, TGSI_IMM_INT32);
1067 }
1068
1069 struct ureg_src
1070 ureg_DECL_immediate_uint64( struct ureg_program *ureg,
1071 const uint64_t *v,
1072 unsigned nr )
1073 {
1074 union {
1075 unsigned u[4];
1076 uint64_t u64[2];
1077 } fu;
1078 unsigned int i;
1079
1080 assert((nr / 2) < 3);
1081 for (i = 0; i < nr / 2; i++) {
1082 fu.u64[i] = v[i];
1083 }
1084
1085 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_UINT64);
1086 }
1087
1088 struct ureg_src
1089 ureg_DECL_immediate_int64( struct ureg_program *ureg,
1090 const int64_t *v,
1091 unsigned nr )
1092 {
1093 union {
1094 unsigned u[4];
1095 int64_t i64[2];
1096 } fu;
1097 unsigned int i;
1098
1099 assert((nr / 2) < 3);
1100 for (i = 0; i < nr / 2; i++) {
1101 fu.i64[i] = v[i];
1102 }
1103
1104 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_INT64);
1105 }
1106
1107 void
1108 ureg_emit_src( struct ureg_program *ureg,
1109 struct ureg_src src )
1110 {
1111 unsigned size = 1 + (src.Indirect ? 1 : 0) +
1112 (src.Dimension ? (src.DimIndirect ? 2 : 1) : 0);
1113
1114 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1115 unsigned n = 0;
1116
1117 assert(src.File != TGSI_FILE_NULL);
1118 assert(src.File < TGSI_FILE_COUNT);
1119
1120 out[n].value = 0;
1121 out[n].src.File = src.File;
1122 out[n].src.SwizzleX = src.SwizzleX;
1123 out[n].src.SwizzleY = src.SwizzleY;
1124 out[n].src.SwizzleZ = src.SwizzleZ;
1125 out[n].src.SwizzleW = src.SwizzleW;
1126 out[n].src.Index = src.Index;
1127 out[n].src.Negate = src.Negate;
1128 out[0].src.Absolute = src.Absolute;
1129 n++;
1130
1131 if (src.Indirect) {
1132 out[0].src.Indirect = 1;
1133 out[n].value = 0;
1134 out[n].ind.File = src.IndirectFile;
1135 out[n].ind.Swizzle = src.IndirectSwizzle;
1136 out[n].ind.Index = src.IndirectIndex;
1137 if (!ureg->supports_any_inout_decl_range &&
1138 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1139 out[n].ind.ArrayID = 0;
1140 else
1141 out[n].ind.ArrayID = src.ArrayID;
1142 n++;
1143 }
1144
1145 if (src.Dimension) {
1146 out[0].src.Dimension = 1;
1147 out[n].dim.Dimension = 0;
1148 out[n].dim.Padding = 0;
1149 if (src.DimIndirect) {
1150 out[n].dim.Indirect = 1;
1151 out[n].dim.Index = src.DimensionIndex;
1152 n++;
1153 out[n].value = 0;
1154 out[n].ind.File = src.DimIndFile;
1155 out[n].ind.Swizzle = src.DimIndSwizzle;
1156 out[n].ind.Index = src.DimIndIndex;
1157 if (!ureg->supports_any_inout_decl_range &&
1158 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1159 out[n].ind.ArrayID = 0;
1160 else
1161 out[n].ind.ArrayID = src.ArrayID;
1162 } else {
1163 out[n].dim.Indirect = 0;
1164 out[n].dim.Index = src.DimensionIndex;
1165 }
1166 n++;
1167 }
1168
1169 assert(n == size);
1170 }
1171
1172
1173 void
1174 ureg_emit_dst( struct ureg_program *ureg,
1175 struct ureg_dst dst )
1176 {
1177 unsigned size = 1 + (dst.Indirect ? 1 : 0) +
1178 (dst.Dimension ? (dst.DimIndirect ? 2 : 1) : 0);
1179
1180 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1181 unsigned n = 0;
1182
1183 assert(dst.File != TGSI_FILE_NULL);
1184 assert(dst.File != TGSI_FILE_SAMPLER);
1185 assert(dst.File != TGSI_FILE_SAMPLER_VIEW);
1186 assert(dst.File != TGSI_FILE_IMMEDIATE);
1187 assert(dst.File < TGSI_FILE_COUNT);
1188
1189 out[n].value = 0;
1190 out[n].dst.File = dst.File;
1191 out[n].dst.WriteMask = dst.WriteMask;
1192 out[n].dst.Indirect = dst.Indirect;
1193 out[n].dst.Index = dst.Index;
1194 n++;
1195
1196 if (dst.Indirect) {
1197 out[n].value = 0;
1198 out[n].ind.File = dst.IndirectFile;
1199 out[n].ind.Swizzle = dst.IndirectSwizzle;
1200 out[n].ind.Index = dst.IndirectIndex;
1201 if (!ureg->supports_any_inout_decl_range &&
1202 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1203 out[n].ind.ArrayID = 0;
1204 else
1205 out[n].ind.ArrayID = dst.ArrayID;
1206 n++;
1207 }
1208
1209 if (dst.Dimension) {
1210 out[0].dst.Dimension = 1;
1211 out[n].dim.Dimension = 0;
1212 out[n].dim.Padding = 0;
1213 if (dst.DimIndirect) {
1214 out[n].dim.Indirect = 1;
1215 out[n].dim.Index = dst.DimensionIndex;
1216 n++;
1217 out[n].value = 0;
1218 out[n].ind.File = dst.DimIndFile;
1219 out[n].ind.Swizzle = dst.DimIndSwizzle;
1220 out[n].ind.Index = dst.DimIndIndex;
1221 if (!ureg->supports_any_inout_decl_range &&
1222 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1223 out[n].ind.ArrayID = 0;
1224 else
1225 out[n].ind.ArrayID = dst.ArrayID;
1226 } else {
1227 out[n].dim.Indirect = 0;
1228 out[n].dim.Index = dst.DimensionIndex;
1229 }
1230 n++;
1231 }
1232
1233 assert(n == size);
1234 }
1235
1236
1237 static void validate( enum tgsi_opcode opcode,
1238 unsigned nr_dst,
1239 unsigned nr_src )
1240 {
1241 #ifdef DEBUG
1242 const struct tgsi_opcode_info *info = tgsi_get_opcode_info( opcode );
1243 assert(info);
1244 if (info) {
1245 assert(nr_dst == info->num_dst);
1246 assert(nr_src == info->num_src);
1247 }
1248 #endif
1249 }
1250
1251 struct ureg_emit_insn_result
1252 ureg_emit_insn(struct ureg_program *ureg,
1253 enum tgsi_opcode opcode,
1254 boolean saturate,
1255 unsigned precise,
1256 unsigned num_dst,
1257 unsigned num_src)
1258 {
1259 union tgsi_any_token *out;
1260 uint count = 1;
1261 struct ureg_emit_insn_result result;
1262
1263 validate( opcode, num_dst, num_src );
1264
1265 out = get_tokens( ureg, DOMAIN_INSN, count );
1266 out[0].insn = tgsi_default_instruction();
1267 out[0].insn.Opcode = opcode;
1268 out[0].insn.Saturate = saturate;
1269 out[0].insn.Precise = precise;
1270 out[0].insn.NumDstRegs = num_dst;
1271 out[0].insn.NumSrcRegs = num_src;
1272
1273 result.insn_token = ureg->domain[DOMAIN_INSN].count - count;
1274 result.extended_token = result.insn_token;
1275
1276 ureg->nr_instructions++;
1277
1278 return result;
1279 }
1280
1281
1282 /**
1283 * Emit a label token.
1284 * \param label_token returns a token number indicating where the label
1285 * needs to be patched.  This value should later be passed to the
1286 * ureg_fixup_label() function.
1287 */
1288 void
1289 ureg_emit_label(struct ureg_program *ureg,
1290 unsigned extended_token,
1291 unsigned *label_token )
1292 {
1293 union tgsi_any_token *out, *insn;
1294
1295 if (!label_token)
1296 return;
1297
1298 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1299 out[0].value = 0;
1300
1301 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1302 insn->insn.Label = 1;
1303
1304 *label_token = ureg->domain[DOMAIN_INSN].count - 1;
1305 }
1306
1307 /* Returns a number which can be used in a label to point to the
1308 * next instruction to be emitted.
1309 */
1310 unsigned
1311 ureg_get_instruction_number( struct ureg_program *ureg )
1312 {
1313 return ureg->nr_instructions;
1314 }
1315
1316 /* Patch a given label (expressed as a token number) to point to a
1317 * given instruction (expressed as an instruction number).
1318 */
1319 void
1320 ureg_fixup_label(struct ureg_program *ureg,
1321 unsigned label_token,
1322 unsigned instruction_number )
1323 {
1324 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, label_token );
1325
1326 out->insn_label.Label = instruction_number;
1327 }
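/* Usage sketch (illustrative only, not part of this file): the label
 * workflow for a conditional.  ureg_IF()/ureg_ENDIF() are assumed to be the
 * generated helpers from tgsi_ureg.h; they call ureg_emit_insn() and
 * ureg_emit_label() internally, and the caller patches the label so that
 * the IF points at the instruction emitted after the "then" body:
 *
 *    unsigned label;
 *    ureg_IF(ureg, cond, &label);
 *       ... emit the "then" body ...
 *    ureg_fixup_label(ureg, label, ureg_get_instruction_number(ureg));
 *    ureg_ENDIF(ureg);
 */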
1328
1329
1330 void
1331 ureg_emit_texture(struct ureg_program *ureg,
1332 unsigned extended_token,
1333 enum tgsi_texture_type target,
1334 enum tgsi_return_type return_type, unsigned num_offsets)
1335 {
1336 union tgsi_any_token *out, *insn;
1337
1338 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1339 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1340
1341 insn->insn.Texture = 1;
1342
1343 out[0].value = 0;
1344 out[0].insn_texture.Texture = target;
1345 out[0].insn_texture.NumOffsets = num_offsets;
1346 out[0].insn_texture.ReturnType = return_type;
1347 }
1348
1349 void
1350 ureg_emit_texture_offset(struct ureg_program *ureg,
1351 const struct tgsi_texture_offset *offset)
1352 {
1353 union tgsi_any_token *out;
1354
1355 out = get_tokens( ureg, DOMAIN_INSN, 1);
1356
1357 out[0].value = 0;
1358 out[0].insn_texture_offset = *offset;
1359
1360 }
1361
1362 void
1363 ureg_emit_memory(struct ureg_program *ureg,
1364 unsigned extended_token,
1365 unsigned qualifier,
1366 unsigned texture,
1367 unsigned format)
1368 {
1369 union tgsi_any_token *out, *insn;
1370
1371 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1372 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1373
1374 insn->insn.Memory = 1;
1375
1376 out[0].value = 0;
1377 out[0].insn_memory.Qualifier = qualifier;
1378 out[0].insn_memory.Texture = texture;
1379 out[0].insn_memory.Format = format;
1380 }
1381
1382 void
1383 ureg_fixup_insn_size(struct ureg_program *ureg,
1384 unsigned insn )
1385 {
1386 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, insn );
1387
1388 assert(out->insn.Type == TGSI_TOKEN_TYPE_INSTRUCTION);
1389 out->insn.NrTokens = ureg->domain[DOMAIN_INSN].count - insn - 1;
1390 }
1391
1392
1393 void
1394 ureg_insn(struct ureg_program *ureg,
1395 enum tgsi_opcode opcode,
1396 const struct ureg_dst *dst,
1397 unsigned nr_dst,
1398 const struct ureg_src *src,
1399 unsigned nr_src,
1400 unsigned precise )
1401 {
1402 struct ureg_emit_insn_result insn;
1403 unsigned i;
1404 boolean saturate;
1405
1406 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1407 return;
1408 }
1409
1410 saturate = nr_dst ? dst[0].Saturate : FALSE;
1411
1412 insn = ureg_emit_insn(ureg,
1413 opcode,
1414 saturate,
1415 precise,
1416 nr_dst,
1417 nr_src);
1418
1419 for (i = 0; i < nr_dst; i++)
1420 ureg_emit_dst( ureg, dst[i] );
1421
1422 for (i = 0; i < nr_src; i++)
1423 ureg_emit_src( ureg, src[i] );
1424
1425 ureg_fixup_insn_size( ureg, insn.insn_token );
1426 }
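/* Usage sketch (illustrative only, not part of this file): emitting
 * dst = a * b + c through the generic emitter with no 'precise' qualifier.
 * The per-opcode helpers generated in tgsi_ureg.h (ureg_MAD() etc., assumed
 * here) are the more common entry points and emit the same token sequence:
 *
 *    struct ureg_src srcs[3] = { a, b, c };
 *    ureg_insn(ureg, TGSI_OPCODE_MAD, &dst, 1, srcs, 3, 0);
 */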
1427
1428 void
1429 ureg_tex_insn(struct ureg_program *ureg,
1430 enum tgsi_opcode opcode,
1431 const struct ureg_dst *dst,
1432 unsigned nr_dst,
1433 enum tgsi_texture_type target,
1434 enum tgsi_return_type return_type,
1435 const struct tgsi_texture_offset *texoffsets,
1436 unsigned nr_offset,
1437 const struct ureg_src *src,
1438 unsigned nr_src )
1439 {
1440 struct ureg_emit_insn_result insn;
1441 unsigned i;
1442 boolean saturate;
1443
1444 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1445 return;
1446 }
1447
1448 saturate = nr_dst ? dst[0].Saturate : FALSE;
1449
1450 insn = ureg_emit_insn(ureg,
1451 opcode,
1452 saturate,
1453 0,
1454 nr_dst,
1455 nr_src);
1456
1457 ureg_emit_texture( ureg, insn.extended_token, target, return_type,
1458 nr_offset );
1459
1460 for (i = 0; i < nr_offset; i++)
1461 ureg_emit_texture_offset( ureg, &texoffsets[i]);
1462
1463 for (i = 0; i < nr_dst; i++)
1464 ureg_emit_dst( ureg, dst[i] );
1465
1466 for (i = 0; i < nr_src; i++)
1467 ureg_emit_src( ureg, src[i] );
1468
1469 ureg_fixup_insn_size( ureg, insn.insn_token );
1470 }
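/* Usage sketch (illustrative only, not part of this file): a plain 2D
 * texture sample with no texel offsets.  TEX takes the coordinate and the
 * sampler as its two sources; TGSI_RETURN_TYPE_UNKNOWN (assumed available
 * in this tree) defers the return type to the sampler view declaration:
 *
 *    struct ureg_src srcs[2] = { coord, samp };
 *    ureg_tex_insn(ureg, TGSI_OPCODE_TEX, &dst, 1,
 *                  TGSI_TEXTURE_2D, TGSI_RETURN_TYPE_UNKNOWN,
 *                  NULL, 0, srcs, 2);
 */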
1471
1472
1473 void
1474 ureg_memory_insn(struct ureg_program *ureg,
1475 enum tgsi_opcode opcode,
1476 const struct ureg_dst *dst,
1477 unsigned nr_dst,
1478 const struct ureg_src *src,
1479 unsigned nr_src,
1480 unsigned qualifier,
1481 unsigned texture,
1482 unsigned format)
1483 {
1484 struct ureg_emit_insn_result insn;
1485 unsigned i;
1486
1487 insn = ureg_emit_insn(ureg,
1488 opcode,
1489 FALSE,
1490 0,
1491 nr_dst,
1492 nr_src);
1493
1494 ureg_emit_memory(ureg, insn.extended_token, qualifier, texture, format);
1495
1496 for (i = 0; i < nr_dst; i++)
1497 ureg_emit_dst(ureg, dst[i]);
1498
1499 for (i = 0; i < nr_src; i++)
1500 ureg_emit_src(ureg, src[i]);
1501
1502 ureg_fixup_insn_size(ureg, insn.insn_token);
1503 }
1504
1505
1506 static void
1507 emit_decl_semantic(struct ureg_program *ureg,
1508 unsigned file,
1509 unsigned first,
1510 unsigned last,
1511 enum tgsi_semantic semantic_name,
1512 unsigned semantic_index,
1513 unsigned streams,
1514 unsigned usage_mask,
1515 unsigned array_id)
1516 {
1517 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1518
1519 out[0].value = 0;
1520 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1521 out[0].decl.NrTokens = 3;
1522 out[0].decl.File = file;
1523 out[0].decl.UsageMask = usage_mask;
1524 out[0].decl.Semantic = 1;
1525 out[0].decl.Array = array_id != 0;
1526
1527 out[1].value = 0;
1528 out[1].decl_range.First = first;
1529 out[1].decl_range.Last = last;
1530
1531 out[2].value = 0;
1532 out[2].decl_semantic.Name = semantic_name;
1533 out[2].decl_semantic.Index = semantic_index;
1534 out[2].decl_semantic.StreamX = streams & 3;
1535 out[2].decl_semantic.StreamY = (streams >> 2) & 3;
1536 out[2].decl_semantic.StreamZ = (streams >> 4) & 3;
1537 out[2].decl_semantic.StreamW = (streams >> 6) & 3;
1538
1539 if (array_id) {
1540 out[3].value = 0;
1541 out[3].array.ArrayID = array_id;
1542 }
1543 }
1544
1545 static void
1546 emit_decl_atomic_2d(struct ureg_program *ureg,
1547 unsigned first,
1548 unsigned last,
1549 unsigned index2D,
1550 unsigned array_id)
1551 {
1552 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1553
1554 out[0].value = 0;
1555 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1556 out[0].decl.NrTokens = 3;
1557 out[0].decl.File = TGSI_FILE_HW_ATOMIC;
1558 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1559 out[0].decl.Dimension = 1;
1560 out[0].decl.Array = array_id != 0;
1561
1562 out[1].value = 0;
1563 out[1].decl_range.First = first;
1564 out[1].decl_range.Last = last;
1565
1566 out[2].value = 0;
1567 out[2].decl_dim.Index2D = index2D;
1568
1569 if (array_id) {
1570 out[3].value = 0;
1571 out[3].array.ArrayID = array_id;
1572 }
1573 }
1574
1575 static void
1576 emit_decl_fs(struct ureg_program *ureg,
1577 unsigned file,
1578 unsigned first,
1579 unsigned last,
1580 enum tgsi_semantic semantic_name,
1581 unsigned semantic_index,
1582 enum tgsi_interpolate_mode interpolate,
1583 unsigned cylindrical_wrap,
1584 enum tgsi_interpolate_loc interpolate_location,
1585 unsigned array_id,
1586 unsigned usage_mask)
1587 {
1588 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL,
1589 array_id ? 5 : 4);
1590
1591 out[0].value = 0;
1592 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1593 out[0].decl.NrTokens = 4;
1594 out[0].decl.File = file;
1595 out[0].decl.UsageMask = usage_mask;
1596 out[0].decl.Interpolate = 1;
1597 out[0].decl.Semantic = 1;
1598 out[0].decl.Array = array_id != 0;
1599
1600 out[1].value = 0;
1601 out[1].decl_range.First = first;
1602 out[1].decl_range.Last = last;
1603
1604 out[2].value = 0;
1605 out[2].decl_interp.Interpolate = interpolate;
1606 out[2].decl_interp.CylindricalWrap = cylindrical_wrap;
1607 out[2].decl_interp.Location = interpolate_location;
1608
1609 out[3].value = 0;
1610 out[3].decl_semantic.Name = semantic_name;
1611 out[3].decl_semantic.Index = semantic_index;
1612
1613 if (array_id) {
1614 out[4].value = 0;
1615 out[4].array.ArrayID = array_id;
1616 }
1617 }
1618
1619 static void
1620 emit_decl_temps( struct ureg_program *ureg,
1621 unsigned first, unsigned last,
1622 boolean local,
1623 unsigned arrayid )
1624 {
1625 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL,
1626 arrayid ? 3 : 2 );
1627
1628 out[0].value = 0;
1629 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1630 out[0].decl.NrTokens = 2;
1631 out[0].decl.File = TGSI_FILE_TEMPORARY;
1632 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1633 out[0].decl.Local = local;
1634
1635 out[1].value = 0;
1636 out[1].decl_range.First = first;
1637 out[1].decl_range.Last = last;
1638
1639 if (arrayid) {
1640 out[0].decl.Array = 1;
1641 out[2].value = 0;
1642 out[2].array.ArrayID = arrayid;
1643 }
1644 }
1645
1646 static void emit_decl_range( struct ureg_program *ureg,
1647 unsigned file,
1648 unsigned first,
1649 unsigned count )
1650 {
1651 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
1652
1653 out[0].value = 0;
1654 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1655 out[0].decl.NrTokens = 2;
1656 out[0].decl.File = file;
1657 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1658 out[0].decl.Semantic = 0;
1659
1660 out[1].value = 0;
1661 out[1].decl_range.First = first;
1662 out[1].decl_range.Last = first + count - 1;
1663 }
1664
1665 static void
1666 emit_decl_range2D(struct ureg_program *ureg,
1667 unsigned file,
1668 unsigned first,
1669 unsigned last,
1670 unsigned index2D)
1671 {
1672 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1673
1674 out[0].value = 0;
1675 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1676 out[0].decl.NrTokens = 3;
1677 out[0].decl.File = file;
1678 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1679 out[0].decl.Dimension = 1;
1680
1681 out[1].value = 0;
1682 out[1].decl_range.First = first;
1683 out[1].decl_range.Last = last;
1684
1685 out[2].value = 0;
1686 out[2].decl_dim.Index2D = index2D;
1687 }
1688
1689 static void
1690 emit_decl_sampler_view(struct ureg_program *ureg,
1691 unsigned index,
1692 enum tgsi_texture_type target,
1693 enum tgsi_return_type return_type_x,
1694 enum tgsi_return_type return_type_y,
1695 enum tgsi_return_type return_type_z,
1696 enum tgsi_return_type return_type_w )
1697 {
1698 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1699
1700 out[0].value = 0;
1701 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1702 out[0].decl.NrTokens = 3;
1703 out[0].decl.File = TGSI_FILE_SAMPLER_VIEW;
1704 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1705
1706 out[1].value = 0;
1707 out[1].decl_range.First = index;
1708 out[1].decl_range.Last = index;
1709
1710 out[2].value = 0;
1711 out[2].decl_sampler_view.Resource = target;
1712 out[2].decl_sampler_view.ReturnTypeX = return_type_x;
1713 out[2].decl_sampler_view.ReturnTypeY = return_type_y;
1714 out[2].decl_sampler_view.ReturnTypeZ = return_type_z;
1715 out[2].decl_sampler_view.ReturnTypeW = return_type_w;
1716 }
1717
1718 static void
1719 emit_decl_image(struct ureg_program *ureg,
1720 unsigned index,
1721 enum tgsi_texture_type target,
1722 unsigned format,
1723 boolean wr,
1724 boolean raw)
1725 {
1726 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1727
1728 out[0].value = 0;
1729 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1730 out[0].decl.NrTokens = 3;
1731 out[0].decl.File = TGSI_FILE_IMAGE;
1732 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1733
1734 out[1].value = 0;
1735 out[1].decl_range.First = index;
1736 out[1].decl_range.Last = index;
1737
1738 out[2].value = 0;
1739 out[2].decl_image.Resource = target;
1740 out[2].decl_image.Writable = wr;
1741 out[2].decl_image.Raw = raw;
1742 out[2].decl_image.Format = format;
1743 }
1744
1745 static void
1746 emit_decl_buffer(struct ureg_program *ureg,
1747 unsigned index,
1748 bool atomic)
1749 {
1750 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1751
1752 out[0].value = 0;
1753 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1754 out[0].decl.NrTokens = 2;
1755 out[0].decl.File = TGSI_FILE_BUFFER;
1756 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1757 out[0].decl.Atomic = atomic;
1758
1759 out[1].value = 0;
1760 out[1].decl_range.First = index;
1761 out[1].decl_range.Last = index;
1762 }
1763
1764 static void
1765 emit_decl_memory(struct ureg_program *ureg, unsigned memory_type)
1766 {
1767 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1768
1769 out[0].value = 0;
1770 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1771 out[0].decl.NrTokens = 2;
1772 out[0].decl.File = TGSI_FILE_MEMORY;
1773 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1774 out[0].decl.MemType = memory_type;
1775
1776 out[1].value = 0;
1777 out[1].decl_range.First = memory_type;
1778 out[1].decl_range.Last = memory_type;
1779 }
1780
1781 static void
1782 emit_immediate( struct ureg_program *ureg,
1783 const unsigned *v,
1784 unsigned type )
1785 {
1786 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 5 );
1787
1788 out[0].value = 0;
1789 out[0].imm.Type = TGSI_TOKEN_TYPE_IMMEDIATE;
1790 out[0].imm.NrTokens = 5;
1791 out[0].imm.DataType = type;
1792 out[0].imm.Padding = 0;
1793
1794 out[1].imm_data.Uint = v[0];
1795 out[2].imm_data.Uint = v[1];
1796 out[3].imm_data.Uint = v[2];
1797 out[4].imm_data.Uint = v[3];
1798 }
1799
1800 static void
1801 emit_property(struct ureg_program *ureg,
1802 unsigned name,
1803 unsigned data)
1804 {
1805 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1806
1807 out[0].value = 0;
1808 out[0].prop.Type = TGSI_TOKEN_TYPE_PROPERTY;
1809 out[0].prop.NrTokens = 2;
1810 out[0].prop.PropertyName = name;
1811
1812 out[1].prop_data.Data = data;
1813 }
1814
1815
1816 static void emit_decls( struct ureg_program *ureg )
1817 {
1818 unsigned i,j;
1819
1820 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
1821 if (ureg->properties[i] != ~0)
1822 emit_property(ureg, i, ureg->properties[i]);
1823
1824 if (ureg->processor == PIPE_SHADER_VERTEX) {
1825 for (i = 0; i < PIPE_MAX_ATTRIBS; i++) {
1826 if (ureg->vs_inputs[i/32] & (1u << (i%32))) {
1827 emit_decl_range( ureg, TGSI_FILE_INPUT, i, 1 );
1828 }
1829 }
1830 } else if (ureg->processor == PIPE_SHADER_FRAGMENT) {
1831 if (ureg->supports_any_inout_decl_range) {
1832 for (i = 0; i < ureg->nr_inputs; i++) {
1833 emit_decl_fs(ureg,
1834 TGSI_FILE_INPUT,
1835 ureg->input[i].first,
1836 ureg->input[i].last,
1837 ureg->input[i].semantic_name,
1838 ureg->input[i].semantic_index,
1839 ureg->input[i].interp,
1840 ureg->input[i].cylindrical_wrap,
1841 ureg->input[i].interp_location,
1842 ureg->input[i].array_id,
1843 ureg->input[i].usage_mask);
1844 }
1845 }
1846 else {
1847 for (i = 0; i < ureg->nr_inputs; i++) {
1848 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1849 emit_decl_fs(ureg,
1850 TGSI_FILE_INPUT,
1851 j, j,
1852 ureg->input[i].semantic_name,
1853 ureg->input[i].semantic_index +
1854 (j - ureg->input[i].first),
1855 ureg->input[i].interp,
1856 ureg->input[i].cylindrical_wrap,
1857 ureg->input[i].interp_location, 0,
1858 ureg->input[i].usage_mask);
1859 }
1860 }
1861 }
1862 } else {
1863 if (ureg->supports_any_inout_decl_range) {
1864 for (i = 0; i < ureg->nr_inputs; i++) {
1865 emit_decl_semantic(ureg,
1866 TGSI_FILE_INPUT,
1867 ureg->input[i].first,
1868 ureg->input[i].last,
1869 ureg->input[i].semantic_name,
1870 ureg->input[i].semantic_index,
1871 0,
1872 TGSI_WRITEMASK_XYZW,
1873 ureg->input[i].array_id);
1874 }
1875 }
1876 else {
1877 for (i = 0; i < ureg->nr_inputs; i++) {
1878 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1879 emit_decl_semantic(ureg,
1880 TGSI_FILE_INPUT,
1881 j, j,
1882 ureg->input[i].semantic_name,
1883 ureg->input[i].semantic_index +
1884 (j - ureg->input[i].first),
1885 0,
1886 TGSI_WRITEMASK_XYZW, 0);
1887 }
1888 }
1889 }
1890 }
1891
1892 for (i = 0; i < ureg->nr_system_values; i++) {
1893 emit_decl_semantic(ureg,
1894 TGSI_FILE_SYSTEM_VALUE,
1895 i,
1896 i,
1897 ureg->system_value[i].semantic_name,
1898 ureg->system_value[i].semantic_index,
1899 0,
1900 TGSI_WRITEMASK_XYZW, 0);
1901 }
1902
1903 if (ureg->supports_any_inout_decl_range) {
1904 for (i = 0; i < ureg->nr_outputs; i++) {
1905 emit_decl_semantic(ureg,
1906 TGSI_FILE_OUTPUT,
1907 ureg->output[i].first,
1908 ureg->output[i].last,
1909 ureg->output[i].semantic_name,
1910 ureg->output[i].semantic_index,
1911 ureg->output[i].streams,
1912 ureg->output[i].usage_mask,
1913 ureg->output[i].array_id);
1914 }
1915 }
1916 else {
1917 for (i = 0; i < ureg->nr_outputs; i++) {
1918 for (j = ureg->output[i].first; j <= ureg->output[i].last; j++) {
1919 emit_decl_semantic(ureg,
1920 TGSI_FILE_OUTPUT,
1921 j, j,
1922 ureg->output[i].semantic_name,
1923 ureg->output[i].semantic_index +
1924 (j - ureg->output[i].first),
1925 ureg->output[i].streams,
1926 ureg->output[i].usage_mask, 0);
1927 }
1928 }
1929 }
1930
1931 for (i = 0; i < ureg->nr_samplers; i++) {
1932 emit_decl_range( ureg,
1933 TGSI_FILE_SAMPLER,
1934 ureg->sampler[i].Index, 1 );
1935 }
1936
1937 for (i = 0; i < ureg->nr_sampler_views; i++) {
1938 emit_decl_sampler_view(ureg,
1939 ureg->sampler_view[i].index,
1940 ureg->sampler_view[i].target,
1941 ureg->sampler_view[i].return_type_x,
1942 ureg->sampler_view[i].return_type_y,
1943 ureg->sampler_view[i].return_type_z,
1944 ureg->sampler_view[i].return_type_w);
1945 }
1946
1947 for (i = 0; i < ureg->nr_images; i++) {
1948 emit_decl_image(ureg,
1949 ureg->image[i].index,
1950 ureg->image[i].target,
1951 ureg->image[i].format,
1952 ureg->image[i].wr,
1953 ureg->image[i].raw);
1954 }
1955
1956 for (i = 0; i < ureg->nr_buffers; i++) {
1957 emit_decl_buffer(ureg, ureg->buffer[i].index, ureg->buffer[i].atomic);
1958 }
1959
1960 for (i = 0; i < TGSI_MEMORY_TYPE_COUNT; i++) {
1961 if (ureg->use_memory[i])
1962 emit_decl_memory(ureg, i);
1963 }
1964
1965 for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
1966 struct const_decl *decl = &ureg->const_decls[i];
1967
1968 if (decl->nr_constant_ranges) {
1969 uint j;
1970
1971 for (j = 0; j < decl->nr_constant_ranges; j++) {
1972 emit_decl_range2D(ureg,
1973 TGSI_FILE_CONSTANT,
1974 decl->constant_range[j].first,
1975 decl->constant_range[j].last,
1976 i);
1977 }
1978 }
1979 }
1980
1981 for (i = 0; i < PIPE_MAX_HW_ATOMIC_BUFFERS; i++) {
1982 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[i];
1983
1984 if (decl->nr_hw_atomic_ranges) {
1985 uint j;
1986
1987 for (j = 0; j < decl->nr_hw_atomic_ranges; j++) {
1988 emit_decl_atomic_2d(ureg,
1989 decl->hw_atomic_range[j].first,
1990 decl->hw_atomic_range[j].last,
1991 i,
1992 decl->hw_atomic_range[j].array_id);
1993 }
1994 }
1995 }
1996
1997 if (ureg->nr_temps) {
1998 unsigned array = 0;
1999 for (i = 0; i < ureg->nr_temps;) {
2000 boolean local = util_bitmask_get(ureg->local_temps, i);
2001 unsigned first = i;
2002 i = util_bitmask_get_next_index(ureg->decl_temps, i + 1);
2003 if (i == UTIL_BITMASK_INVALID_INDEX)
2004 i = ureg->nr_temps;
2005
2006 if (array < ureg->nr_array_temps && ureg->array_temps[array] == first)
2007 emit_decl_temps( ureg, first, i - 1, local, ++array );
2008 else
2009 emit_decl_temps( ureg, first, i - 1, local, 0 );
2010 }
2011 }
2012
2013 if (ureg->nr_addrs) {
2014 emit_decl_range( ureg,
2015 TGSI_FILE_ADDRESS,
2016 0, ureg->nr_addrs );
2017 }
2018
2019 for (i = 0; i < ureg->nr_immediates; i++) {
2020 emit_immediate( ureg,
2021 ureg->immediate[i].value.u,
2022 ureg->immediate[i].type );
2023 }
2024 }
2025
2026 /* Append the instruction tokens onto the declarations to build a
2027 * contiguous stream suitable to send to the driver.
2028 */
2029 static void copy_instructions( struct ureg_program *ureg )
2030 {
2031 unsigned nr_tokens = ureg->domain[DOMAIN_INSN].count;
2032 union tgsi_any_token *out = get_tokens( ureg,
2033 DOMAIN_DECL,
2034 nr_tokens );
2035
2036 memcpy(out,
2037 ureg->domain[DOMAIN_INSN].tokens,
2038 nr_tokens * sizeof out[0] );
2039 }
2040
2041
2042 static void
2043 fixup_header_size(struct ureg_program *ureg)
2044 {
2045 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_DECL, 0 );
2046
2047 out->header.BodySize = ureg->domain[DOMAIN_DECL].count - 2;
2048 }
2049
2050
2051 static void
2052 emit_header( struct ureg_program *ureg )
2053 {
2054 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
2055
2056 out[0].header.HeaderSize = 2;
2057 out[0].header.BodySize = 0;
2058
2059 out[1].processor.Processor = ureg->processor;
2060 out[1].processor.Padding = 0;
2061 }
2062
2063
2064 const struct tgsi_token *ureg_finalize( struct ureg_program *ureg )
2065 {
2066 const struct tgsi_token *tokens;
2067
2068 switch (ureg->processor) {
2069 case PIPE_SHADER_VERTEX:
2070 case PIPE_SHADER_TESS_EVAL:
2071 ureg_property(ureg, TGSI_PROPERTY_NEXT_SHADER,
2072 ureg->next_shader_processor == -1 ?
2073 PIPE_SHADER_FRAGMENT :
2074 ureg->next_shader_processor);
2075 break;
2076 default:
2077 ; /* nothing */
2078 }
2079
2080 emit_header( ureg );
2081 emit_decls( ureg );
2082 copy_instructions( ureg );
2083 fixup_header_size( ureg );
2084
2085 if (ureg->domain[0].tokens == error_tokens ||
2086 ureg->domain[1].tokens == error_tokens) {
2087 debug_printf("%s: error in generated shader\n", __FUNCTION__);
2088 assert(0);
2089 return NULL;
2090 }
2091
2092 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2093
2094 if (0) {
2095 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__,
2096 ureg->domain[DOMAIN_DECL].count);
2097 tgsi_dump( tokens, 0 );
2098 }
2099
2100 #if DEBUG
2101 if (tokens && !tgsi_sanity_check(tokens)) {
2102 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2103 tgsi_dump(tokens, 0);
2104 assert(0);
2105 }
2106 #endif
2107
2108
2109 return tokens;
2110 }
2111
2112
2113 void *ureg_create_shader( struct ureg_program *ureg,
2114 struct pipe_context *pipe,
2115 const struct pipe_stream_output_info *so )
2116 {
2117 struct pipe_shader_state state;
2118
2119 pipe_shader_state_from_tgsi(&state, ureg_finalize(ureg));
2120 if(!state.tokens)
2121 return NULL;
2122
2123 if (so)
2124 state.stream_output = *so;
2125
2126 switch (ureg->processor) {
2127 case PIPE_SHADER_VERTEX:
2128 return pipe->create_vs_state(pipe, &state);
2129 case PIPE_SHADER_TESS_CTRL:
2130 return pipe->create_tcs_state(pipe, &state);
2131 case PIPE_SHADER_TESS_EVAL:
2132 return pipe->create_tes_state(pipe, &state);
2133 case PIPE_SHADER_GEOMETRY:
2134 return pipe->create_gs_state(pipe, &state);
2135 case PIPE_SHADER_FRAGMENT:
2136 return pipe->create_fs_state(pipe, &state);
2137 default:
2138 return NULL;
2139 }
2140 }
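/* Usage sketch (illustrative only, not part of this file): the overall
 * lifecycle of a ureg program, from creation to a bound pipe shader CSO:
 *
 *    struct ureg_program *ureg = ureg_create(PIPE_SHADER_FRAGMENT);
 *    ... ureg_DECL_*() calls, then instruction emission ...
 *    ureg_insn(ureg, TGSI_OPCODE_END, NULL, 0, NULL, 0, 0);
 *    void *cso = ureg_create_shader(ureg, pipe, NULL);
 *    ureg_destroy(ureg);
 *
 * ureg_finalize() is called internally by ureg_create_shader(); callers
 * that only want the token stream use ureg_get_tokens()/ureg_free_tokens()
 * instead.
 */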
2141
2142
2143 const struct tgsi_token *ureg_get_tokens( struct ureg_program *ureg,
2144 unsigned *nr_tokens )
2145 {
2146 const struct tgsi_token *tokens;
2147
2148 ureg_finalize(ureg);
2149
2150 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2151
2152 if (nr_tokens)
2153 *nr_tokens = ureg->domain[DOMAIN_DECL].count;
2154
2155 ureg->domain[DOMAIN_DECL].tokens = 0;
2156 ureg->domain[DOMAIN_DECL].size = 0;
2157 ureg->domain[DOMAIN_DECL].order = 0;
2158 ureg->domain[DOMAIN_DECL].count = 0;
2159
2160 return tokens;
2161 }
2162
2163
2164 void ureg_free_tokens( const struct tgsi_token *tokens )
2165 {
2166 FREE((struct tgsi_token *)tokens);
2167 }
2168
2169
2170 struct ureg_program *
2171 ureg_create(enum pipe_shader_type processor)
2172 {
2173 return ureg_create_with_screen(processor, NULL);
2174 }
2175
2176
2177 struct ureg_program *
2178 ureg_create_with_screen(enum pipe_shader_type processor,
2179 struct pipe_screen *screen)
2180 {
2181 int i;
2182 struct ureg_program *ureg = CALLOC_STRUCT( ureg_program );
2183 if (!ureg)
2184 goto no_ureg;
2185
2186 ureg->processor = processor;
2187 ureg->supports_any_inout_decl_range =
2188 screen &&
2189 screen->get_shader_param(screen, processor,
2190 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE) != 0;
2191 ureg->next_shader_processor = -1;
2192
2193 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
2194 ureg->properties[i] = ~0;
2195
2196 ureg->free_temps = util_bitmask_create();
2197 if (ureg->free_temps == NULL)
2198 goto no_free_temps;
2199
2200 ureg->local_temps = util_bitmask_create();
2201 if (ureg->local_temps == NULL)
2202 goto no_local_temps;
2203
2204 ureg->decl_temps = util_bitmask_create();
2205 if (ureg->decl_temps == NULL)
2206 goto no_decl_temps;
2207
2208 return ureg;
2209
2210 no_decl_temps:
2211 util_bitmask_destroy(ureg->local_temps);
2212 no_local_temps:
2213 util_bitmask_destroy(ureg->free_temps);
2214 no_free_temps:
2215 FREE(ureg);
2216 no_ureg:
2217 return NULL;
2218 }
2219
2220
2221 void
2222 ureg_set_next_shader_processor(struct ureg_program *ureg, unsigned processor)
2223 {
2224 ureg->next_shader_processor = processor;
2225 }
2226
2227
2228 unsigned
2229 ureg_get_nr_outputs( const struct ureg_program *ureg )
2230 {
2231 if (!ureg)
2232 return 0;
2233 return ureg->nr_outputs;
2234 }
2235
2236
2237 void ureg_destroy( struct ureg_program *ureg )
2238 {
2239 unsigned i;
2240
2241 for (i = 0; i < ARRAY_SIZE(ureg->domain); i++) {
2242 if (ureg->domain[i].tokens &&
2243 ureg->domain[i].tokens != error_tokens)
2244 FREE(ureg->domain[i].tokens);
2245 }
2246
2247 util_bitmask_destroy(ureg->free_temps);
2248 util_bitmask_destroy(ureg->local_temps);
2249 util_bitmask_destroy(ureg->decl_temps);
2250
2251 FREE(ureg);
2252 }