gallium/aux/tgsi_ureg.c: remove unused parameter from match_or_expand_immediate64
[mesa.git] / src / gallium / auxiliary / tgsi / tgsi_ureg.c
1 /**************************************************************************
2 *
3 * Copyright 2009-2010 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
42
43 union tgsi_any_token {
44 struct tgsi_header header;
45 struct tgsi_processor processor;
46 struct tgsi_token token;
47 struct tgsi_property prop;
48 struct tgsi_property_data prop_data;
49 struct tgsi_declaration decl;
50 struct tgsi_declaration_range decl_range;
51 struct tgsi_declaration_dimension decl_dim;
52 struct tgsi_declaration_interp decl_interp;
53 struct tgsi_declaration_image decl_image;
54 struct tgsi_declaration_semantic decl_semantic;
55 struct tgsi_declaration_sampler_view decl_sampler_view;
56 struct tgsi_declaration_array array;
57 struct tgsi_immediate imm;
58 union tgsi_immediate_data imm_data;
59 struct tgsi_instruction insn;
60 struct tgsi_instruction_label insn_label;
61 struct tgsi_instruction_texture insn_texture;
62 struct tgsi_instruction_memory insn_memory;
63 struct tgsi_texture_offset insn_texture_offset;
64 struct tgsi_src_register src;
65 struct tgsi_ind_register ind;
66 struct tgsi_dimension dim;
67 struct tgsi_dst_register dst;
68 unsigned value;
69 };
70
71
72 struct ureg_tokens {
73 union tgsi_any_token *tokens;
74 unsigned size;
75 unsigned order;
76 unsigned count;
77 };
78
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
87
88 struct const_decl {
89 struct {
90 unsigned first;
91 unsigned last;
92 } constant_range[UREG_MAX_CONSTANT_RANGE];
93 unsigned nr_constant_ranges;
94 };
95
96 struct hw_atomic_decl {
97 struct {
98 unsigned first;
99 unsigned last;
100 unsigned array_id;
101 } hw_atomic_range[UREG_MAX_HW_ATOMIC_RANGE];
102 unsigned nr_hw_atomic_ranges;
103 };
104
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
107
108 struct ureg_program
109 {
110 enum pipe_shader_type processor;
111 bool supports_any_inout_decl_range;
112 int next_shader_processor;
113
114 struct {
115 enum tgsi_semantic semantic_name;
116 unsigned semantic_index;
117 enum tgsi_interpolate_mode interp;
118 unsigned char cylindrical_wrap;
119 unsigned char usage_mask;
120 enum tgsi_interpolate_loc interp_location;
121 unsigned first;
122 unsigned last;
123 unsigned array_id;
124 } input[UREG_MAX_INPUT];
125 unsigned nr_inputs, nr_input_regs;
126
127 unsigned vs_inputs[PIPE_MAX_ATTRIBS/32];
128
129 struct {
130 enum tgsi_semantic semantic_name;
131 unsigned semantic_index;
132 } system_value[UREG_MAX_SYSTEM_VALUE];
133 unsigned nr_system_values;
134
135 struct {
136 enum tgsi_semantic semantic_name;
137 unsigned semantic_index;
138 unsigned streams;
139 unsigned usage_mask; /* = TGSI_WRITEMASK_* */
140 unsigned first;
141 unsigned last;
142 unsigned array_id;
143 } output[UREG_MAX_OUTPUT];
144 unsigned nr_outputs, nr_output_regs;
145
146 struct {
147 union {
148 float f[4];
149 unsigned u[4];
150 int i[4];
151 } value;
152 unsigned nr;
153 unsigned type;
154 } immediate[UREG_MAX_IMMEDIATE];
155 unsigned nr_immediates;
156
157 struct ureg_src sampler[PIPE_MAX_SAMPLERS];
158 unsigned nr_samplers;
159
160 struct {
161 unsigned index;
162 enum tgsi_texture_type target;
163 enum tgsi_return_type return_type_x;
164 enum tgsi_return_type return_type_y;
165 enum tgsi_return_type return_type_z;
166 enum tgsi_return_type return_type_w;
167 } sampler_view[PIPE_MAX_SHADER_SAMPLER_VIEWS];
168 unsigned nr_sampler_views;
169
170 struct {
171 unsigned index;
172 enum tgsi_texture_type target;
173 enum pipe_format format;
174 boolean wr;
175 boolean raw;
176 } image[PIPE_MAX_SHADER_IMAGES];
177 unsigned nr_images;
178
179 struct {
180 unsigned index;
181 bool atomic;
182 } buffer[PIPE_MAX_SHADER_BUFFERS];
183 unsigned nr_buffers;
184
185 struct util_bitmask *free_temps;
186 struct util_bitmask *local_temps;
187 struct util_bitmask *decl_temps;
188 unsigned nr_temps;
189
190 unsigned array_temps[UREG_MAX_ARRAY_TEMPS];
191 unsigned nr_array_temps;
192
193 struct const_decl const_decls[PIPE_MAX_CONSTANT_BUFFERS];
194
195 struct hw_atomic_decl hw_atomic_decls[PIPE_MAX_HW_ATOMIC_BUFFERS];
196
197 unsigned properties[TGSI_PROPERTY_COUNT];
198
199 unsigned nr_addrs;
200 unsigned nr_instructions;
201
202 struct ureg_tokens domain[2];
203
204 bool use_memory[TGSI_MEMORY_TYPE_COUNT];
205 };
206
207 static union tgsi_any_token error_tokens[32];
208
209 static void tokens_error( struct ureg_tokens *tokens )
210 {
211 if (tokens->tokens && tokens->tokens != error_tokens)
212 FREE(tokens->tokens);
213
214 tokens->tokens = error_tokens;
215 tokens->size = ARRAY_SIZE(error_tokens);
216 tokens->count = 0;
217 }
218
219
220 static void tokens_expand( struct ureg_tokens *tokens,
221 unsigned count )
222 {
223 unsigned old_size = tokens->size * sizeof(unsigned);
224
225 if (tokens->tokens == error_tokens) {
226 return;
227 }
228
229 while (tokens->count + count > tokens->size) {
230 tokens->size = (1 << ++tokens->order);
231 }
232
233 tokens->tokens = REALLOC(tokens->tokens,
234 old_size,
235 tokens->size * sizeof(unsigned));
236 if (tokens->tokens == NULL) {
237 tokens_error(tokens);
238 }
239 }
240
241 static void set_bad( struct ureg_program *ureg )
242 {
243 tokens_error(&ureg->domain[0]);
244 }
245
246
247
248 static union tgsi_any_token *get_tokens( struct ureg_program *ureg,
249 unsigned domain,
250 unsigned count )
251 {
252 struct ureg_tokens *tokens = &ureg->domain[domain];
253 union tgsi_any_token *result;
254
255 if (tokens->count + count > tokens->size)
256 tokens_expand(tokens, count);
257
258 result = &tokens->tokens[tokens->count];
259 tokens->count += count;
260 return result;
261 }
262
263
264 static union tgsi_any_token *retrieve_token( struct ureg_program *ureg,
265 unsigned domain,
266 unsigned nr )
267 {
268 if (ureg->domain[domain].tokens == error_tokens)
269 return &error_tokens[0];
270
271 return &ureg->domain[domain].tokens[nr];
272 }
273
274
275 void
276 ureg_property(struct ureg_program *ureg, unsigned name, unsigned value)
277 {
278 assert(name < ARRAY_SIZE(ureg->properties));
279 ureg->properties[name] = value;
280 }
281
282 struct ureg_src
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program *ureg,
284 enum tgsi_semantic semantic_name,
285 unsigned semantic_index,
286 enum tgsi_interpolate_mode interp_mode,
287 unsigned cylindrical_wrap,
288 enum tgsi_interpolate_loc interp_location,
289 unsigned index,
290 unsigned usage_mask,
291 unsigned array_id,
292 unsigned array_size)
293 {
294 unsigned i;
295
296 assert(usage_mask != 0);
297 assert(usage_mask <= TGSI_WRITEMASK_XYZW);
298
299 for (i = 0; i < ureg->nr_inputs; i++) {
300 if (ureg->input[i].semantic_name == semantic_name &&
301 ureg->input[i].semantic_index == semantic_index) {
302 assert(ureg->input[i].interp == interp_mode);
303 assert(ureg->input[i].cylindrical_wrap == cylindrical_wrap);
304 assert(ureg->input[i].interp_location == interp_location);
305 if (ureg->input[i].array_id == array_id) {
306 ureg->input[i].usage_mask |= usage_mask;
307 goto out;
308 }
309 assert((ureg->input[i].usage_mask & usage_mask) == 0);
310 }
311 }
312
313 if (ureg->nr_inputs < UREG_MAX_INPUT) {
314 assert(array_size >= 1);
315 ureg->input[i].semantic_name = semantic_name;
316 ureg->input[i].semantic_index = semantic_index;
317 ureg->input[i].interp = interp_mode;
318 ureg->input[i].cylindrical_wrap = cylindrical_wrap;
319 ureg->input[i].interp_location = interp_location;
320 ureg->input[i].first = index;
321 ureg->input[i].last = index + array_size - 1;
322 ureg->input[i].array_id = array_id;
323 ureg->input[i].usage_mask = usage_mask;
324 ureg->nr_input_regs = MAX2(ureg->nr_input_regs, index + array_size);
325 ureg->nr_inputs++;
326 } else {
327 set_bad(ureg);
328 }
329
330 out:
331 return ureg_src_array_register(TGSI_FILE_INPUT, ureg->input[i].first,
332 array_id);
333 }
334
335 struct ureg_src
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program *ureg,
337 enum tgsi_semantic semantic_name,
338 unsigned semantic_index,
339 enum tgsi_interpolate_mode interp_mode,
340 unsigned cylindrical_wrap,
341 enum tgsi_interpolate_loc interp_location,
342 unsigned array_id,
343 unsigned array_size)
344 {
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
346 semantic_name, semantic_index, interp_mode,
347 cylindrical_wrap, interp_location,
348 ureg->nr_input_regs, TGSI_WRITEMASK_XYZW, array_id, array_size);
349 }
350
351
352 struct ureg_src
353 ureg_DECL_vs_input( struct ureg_program *ureg,
354 unsigned index )
355 {
356 assert(ureg->processor == PIPE_SHADER_VERTEX);
357 assert(index / 32 < ARRAY_SIZE(ureg->vs_inputs));
358
359 ureg->vs_inputs[index/32] |= 1 << (index % 32);
360 return ureg_src_register( TGSI_FILE_INPUT, index );
361 }
362
363
364 struct ureg_src
365 ureg_DECL_input_layout(struct ureg_program *ureg,
366 enum tgsi_semantic semantic_name,
367 unsigned semantic_index,
368 unsigned index,
369 unsigned usage_mask,
370 unsigned array_id,
371 unsigned array_size)
372 {
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
374 semantic_name, semantic_index,
375 TGSI_INTERPOLATE_CONSTANT, 0, TGSI_INTERPOLATE_LOC_CENTER,
376 index, usage_mask, array_id, array_size);
377 }
378
379
380 struct ureg_src
381 ureg_DECL_input(struct ureg_program *ureg,
382 enum tgsi_semantic semantic_name,
383 unsigned semantic_index,
384 unsigned array_id,
385 unsigned array_size)
386 {
387 return ureg_DECL_fs_input_cyl_centroid(ureg, semantic_name, semantic_index,
388 TGSI_INTERPOLATE_CONSTANT, 0,
389 TGSI_INTERPOLATE_LOC_CENTER,
390 array_id, array_size);
391 }
392
393
394 struct ureg_src
395 ureg_DECL_system_value(struct ureg_program *ureg,
396 enum tgsi_semantic semantic_name,
397 unsigned semantic_index)
398 {
399 unsigned i;
400
401 for (i = 0; i < ureg->nr_system_values; i++) {
402 if (ureg->system_value[i].semantic_name == semantic_name &&
403 ureg->system_value[i].semantic_index == semantic_index) {
404 goto out;
405 }
406 }
407
408 if (ureg->nr_system_values < UREG_MAX_SYSTEM_VALUE) {
409 ureg->system_value[ureg->nr_system_values].semantic_name = semantic_name;
410 ureg->system_value[ureg->nr_system_values].semantic_index = semantic_index;
411 i = ureg->nr_system_values;
412 ureg->nr_system_values++;
413 } else {
414 set_bad(ureg);
415 }
416
417 out:
418 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE, i);
419 }
420
421
422 struct ureg_dst
423 ureg_DECL_output_layout(struct ureg_program *ureg,
424 enum tgsi_semantic semantic_name,
425 unsigned semantic_index,
426 unsigned streams,
427 unsigned index,
428 unsigned usage_mask,
429 unsigned array_id,
430 unsigned array_size)
431 {
432 unsigned i;
433
434 assert(usage_mask != 0);
435 assert(!(streams & 0x03) || (usage_mask & 1));
436 assert(!(streams & 0x0c) || (usage_mask & 2));
437 assert(!(streams & 0x30) || (usage_mask & 4));
438 assert(!(streams & 0xc0) || (usage_mask & 8));
439
440 for (i = 0; i < ureg->nr_outputs; i++) {
441 if (ureg->output[i].semantic_name == semantic_name &&
442 ureg->output[i].semantic_index == semantic_index) {
443 if (ureg->output[i].array_id == array_id) {
444 ureg->output[i].usage_mask |= usage_mask;
445 goto out;
446 }
447 assert((ureg->output[i].usage_mask & usage_mask) == 0);
448 }
449 }
450
451 if (ureg->nr_outputs < UREG_MAX_OUTPUT) {
452 ureg->output[i].semantic_name = semantic_name;
453 ureg->output[i].semantic_index = semantic_index;
454 ureg->output[i].usage_mask = usage_mask;
455 ureg->output[i].first = index;
456 ureg->output[i].last = index + array_size - 1;
457 ureg->output[i].array_id = array_id;
458 ureg->nr_output_regs = MAX2(ureg->nr_output_regs, index + array_size);
459 ureg->nr_outputs++;
460 }
461 else {
462 set_bad( ureg );
463 i = 0;
464 }
465
466 out:
467 ureg->output[i].streams |= streams;
468
469 return ureg_dst_array_register(TGSI_FILE_OUTPUT, ureg->output[i].first,
470 array_id);
471 }
472
473
474 struct ureg_dst
475 ureg_DECL_output_masked(struct ureg_program *ureg,
476 unsigned name,
477 unsigned index,
478 unsigned usage_mask,
479 unsigned array_id,
480 unsigned array_size)
481 {
482 return ureg_DECL_output_layout(ureg, name, index, 0,
483 ureg->nr_output_regs, usage_mask, array_id, array_size);
484 }
485
486
487 struct ureg_dst
488 ureg_DECL_output(struct ureg_program *ureg,
489 enum tgsi_semantic name,
490 unsigned index)
491 {
492 return ureg_DECL_output_masked(ureg, name, index, TGSI_WRITEMASK_XYZW,
493 0, 1);
494 }
495
496 struct ureg_dst
497 ureg_DECL_output_array(struct ureg_program *ureg,
498 enum tgsi_semantic semantic_name,
499 unsigned semantic_index,
500 unsigned array_id,
501 unsigned array_size)
502 {
503 return ureg_DECL_output_masked(ureg, semantic_name, semantic_index,
504 TGSI_WRITEMASK_XYZW,
505 array_id, array_size);
506 }
507
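/* Illustrative only: a minimal sketch of how the declaration helpers above
 * are typically used when building a fragment shader.  ureg_DECL_fs_input()
 * and ureg_MOV() are convenience helpers assumed from tgsi_ureg.h; the other
 * functions are defined in this file.
 *
 *    struct ureg_src color = ureg_DECL_fs_input(ureg,
 *                                               TGSI_SEMANTIC_COLOR, 0,
 *                                               TGSI_INTERPOLATE_PERSPECTIVE);
 *    struct ureg_dst out   = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *
 *    ureg_MOV(ureg, out, color);
 *
 * Declaring the same semantic again simply ORs in the new usage_mask and
 * returns the register that was allocated the first time.
 */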
508
509 /* Declare a range of constants in constant buffer index2D.  Keep track of
510 * which ranges have been referred to so that we can emit decls later.
511 *
512 * Constant operands declared with this function must be addressed
513 * with a two-dimensional index.
514 *
515 * There is nothing in this code to bind this constant to any tracked
516 * value or manage any constant_buffer contents -- that's the
517 * responsibility of the calling code.
518 */
519 void
520 ureg_DECL_constant2D(struct ureg_program *ureg,
521 unsigned first,
522 unsigned last,
523 unsigned index2D)
524 {
525 struct const_decl *decl = &ureg->const_decls[index2D];
526
527 assert(index2D < PIPE_MAX_CONSTANT_BUFFERS);
528
529 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
530 uint i = decl->nr_constant_ranges++;
531
532 decl->constant_range[i].first = first;
533 decl->constant_range[i].last = last;
534 }
535 }
536
537
538 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
539 *
540 * Constant operands declared with this function must be addressed
541 * with a one-dimensional index.
542 */
543 struct ureg_src
544 ureg_DECL_constant(struct ureg_program *ureg,
545 unsigned index)
546 {
547 struct const_decl *decl = &ureg->const_decls[0];
548 unsigned minconst = index, maxconst = index;
549 unsigned i;
550
551 /* Inside existing range?
552 */
553 for (i = 0; i < decl->nr_constant_ranges; i++) {
554 if (decl->constant_range[i].first <= index &&
555 decl->constant_range[i].last >= index) {
556 goto out;
557 }
558 }
559
560 /* Extend existing range?
561 */
562 for (i = 0; i < decl->nr_constant_ranges; i++) {
563 if (decl->constant_range[i].last == index - 1) {
564 decl->constant_range[i].last = index;
565 goto out;
566 }
567
568 if (decl->constant_range[i].first == index + 1) {
569 decl->constant_range[i].first = index;
570 goto out;
571 }
572
573 minconst = MIN2(minconst, decl->constant_range[i].first);
574 maxconst = MAX2(maxconst, decl->constant_range[i].last);
575 }
576
577 /* Create new range?
578 */
579 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
580 i = decl->nr_constant_ranges++;
581 decl->constant_range[i].first = index;
582 decl->constant_range[i].last = index;
583 goto out;
584 }
585
586 /* Collapse all ranges down to one:
587 */
588 i = 0;
589 decl->constant_range[0].first = minconst;
590 decl->constant_range[0].last = maxconst;
591 decl->nr_constant_ranges = 1;
592
593 out:
594 assert(i < decl->nr_constant_ranges);
595 assert(decl->constant_range[i].first <= index);
596 assert(decl->constant_range[i].last >= index);
597
598 struct ureg_src src = ureg_src_register(TGSI_FILE_CONSTANT, index);
599 return ureg_src_dimension(src, 0);
600 }
601
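/* Illustrative only: how the two constant declaration paths above differ.
 * Everything used here is either defined in this file or comes from
 * tgsi_ureg.h (ureg_src_register, ureg_src_dimension).
 *
 *    // 1D path: range bookkeeping is implicit and the returned register
 *    // already carries a dimension index of 0 (constant buffer 0).
 *    struct ureg_src c3 = ureg_DECL_constant(ureg, 3);
 *
 *    // 2D path: declare the range explicitly, then address it yourself
 *    // with a two-dimensional index (here CONST[1][4]).
 *    ureg_DECL_constant2D(ureg, 0, 15, 1);
 *    struct ureg_src c2d =
 *       ureg_src_dimension(ureg_src_register(TGSI_FILE_CONSTANT, 4), 1);
 */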
602
603 /* Declare a range of hw atomic counters.  Keep track of which ranges have
604 * been referred to so that we can emit decls later.
605 */
606 void
607 ureg_DECL_hw_atomic(struct ureg_program *ureg,
608 unsigned first,
609 unsigned last,
610 unsigned buffer_id,
611 unsigned array_id)
612 {
613 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[buffer_id];
614
615 if (decl->nr_hw_atomic_ranges < UREG_MAX_HW_ATOMIC_RANGE) {
616 uint i = decl->nr_hw_atomic_ranges++;
617
618 decl->hw_atomic_range[i].first = first;
619 decl->hw_atomic_range[i].last = last;
620 decl->hw_atomic_range[i].array_id = array_id;
621 } else {
622 set_bad(ureg);
623 }
624 }
625
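/* Illustrative only: declaring a single atomic counter at binding point 0,
 * offset 0, outside of any array (array_id 0).
 *
 *    ureg_DECL_hw_atomic(ureg, 0, 0, 0, 0);
 *
 * The range is emitted later by emit_decls() as a TGSI_FILE_HW_ATOMIC
 * declaration whose Index2D is the buffer_id.
 */
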
626 static struct ureg_dst alloc_temporary( struct ureg_program *ureg,
627 boolean local )
628 {
629 unsigned i;
630
631 /* Look for a released temporary.
632 */
633 for (i = util_bitmask_get_first_index(ureg->free_temps);
634 i != UTIL_BITMASK_INVALID_INDEX;
635 i = util_bitmask_get_next_index(ureg->free_temps, i + 1)) {
636 if (util_bitmask_get(ureg->local_temps, i) == local)
637 break;
638 }
639
640 /* Or allocate a new one.
641 */
642 if (i == UTIL_BITMASK_INVALID_INDEX) {
643 i = ureg->nr_temps++;
644
645 if (local)
646 util_bitmask_set(ureg->local_temps, i);
647
648 /* Start a new declaration when the local flag changes */
649 if (!i || util_bitmask_get(ureg->local_temps, i - 1) != local)
650 util_bitmask_set(ureg->decl_temps, i);
651 }
652
653 util_bitmask_clear(ureg->free_temps, i);
654
655 return ureg_dst_register( TGSI_FILE_TEMPORARY, i );
656 }
657
658 struct ureg_dst ureg_DECL_temporary( struct ureg_program *ureg )
659 {
660 return alloc_temporary(ureg, FALSE);
661 }
662
663 struct ureg_dst ureg_DECL_local_temporary( struct ureg_program *ureg )
664 {
665 return alloc_temporary(ureg, TRUE);
666 }
667
668 struct ureg_dst ureg_DECL_array_temporary( struct ureg_program *ureg,
669 unsigned size,
670 boolean local )
671 {
672 unsigned i = ureg->nr_temps;
673 struct ureg_dst dst = ureg_dst_register( TGSI_FILE_TEMPORARY, i );
674
675 if (local)
676 util_bitmask_set(ureg->local_temps, i);
677
678 /* Always start a new declaration at the start */
679 util_bitmask_set(ureg->decl_temps, i);
680
681 ureg->nr_temps += size;
682
683 /* and also at the end of the array */
684 util_bitmask_set(ureg->decl_temps, ureg->nr_temps);
685
686 if (ureg->nr_array_temps < UREG_MAX_ARRAY_TEMPS) {
687 ureg->array_temps[ureg->nr_array_temps++] = i;
688 dst.ArrayID = ureg->nr_array_temps;
689 }
690
691 return dst;
692 }
693
694 void ureg_release_temporary( struct ureg_program *ureg,
695 struct ureg_dst tmp )
696 {
697 if(tmp.File == TGSI_FILE_TEMPORARY)
698 util_bitmask_set(ureg->free_temps, tmp.Index);
699 }
700
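/* Illustrative only: the usual allocate/use/release pattern for temporaries,
 * given existing ureg_src values a and b and a ureg_dst dst.  ureg_MUL,
 * ureg_MOV and ureg_src() are helpers assumed from tgsi_ureg.h.
 *
 *    struct ureg_dst tmp = ureg_DECL_temporary(ureg);
 *    ureg_MUL(ureg, tmp, a, b);
 *    ureg_MOV(ureg, dst, ureg_src(tmp));
 *    ureg_release_temporary(ureg, tmp);
 *
 * Released registers go back onto the free_temps bitmask and may be handed
 * out again by a later ureg_DECL_temporary() call.
 */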
701
702 /* Allocate a new address register.
703 */
704 struct ureg_dst ureg_DECL_address( struct ureg_program *ureg )
705 {
706 if (ureg->nr_addrs < UREG_MAX_ADDR)
707 return ureg_dst_register( TGSI_FILE_ADDRESS, ureg->nr_addrs++ );
708
709 assert( 0 );
710 return ureg_dst_register( TGSI_FILE_ADDRESS, 0 );
711 }
712
713 /* Allocate a new sampler.
714 */
715 struct ureg_src ureg_DECL_sampler( struct ureg_program *ureg,
716 unsigned nr )
717 {
718 unsigned i;
719
720 for (i = 0; i < ureg->nr_samplers; i++)
721 if (ureg->sampler[i].Index == (int)nr)
722 return ureg->sampler[i];
723
724 if (i < PIPE_MAX_SAMPLERS) {
725 ureg->sampler[i] = ureg_src_register( TGSI_FILE_SAMPLER, nr );
726 ureg->nr_samplers++;
727 return ureg->sampler[i];
728 }
729
730 assert( 0 );
731 return ureg->sampler[0];
732 }
733
734 /*
735 * Allocate a new shader sampler view.
736 */
737 struct ureg_src
738 ureg_DECL_sampler_view(struct ureg_program *ureg,
739 unsigned index,
740 enum tgsi_texture_type target,
741 enum tgsi_return_type return_type_x,
742 enum tgsi_return_type return_type_y,
743 enum tgsi_return_type return_type_z,
744 enum tgsi_return_type return_type_w)
745 {
746 struct ureg_src reg = ureg_src_register(TGSI_FILE_SAMPLER_VIEW, index);
747 uint i;
748
749 for (i = 0; i < ureg->nr_sampler_views; i++) {
750 if (ureg->sampler_view[i].index == index) {
751 return reg;
752 }
753 }
754
755 if (i < PIPE_MAX_SHADER_SAMPLER_VIEWS) {
756 ureg->sampler_view[i].index = index;
757 ureg->sampler_view[i].target = target;
758 ureg->sampler_view[i].return_type_x = return_type_x;
759 ureg->sampler_view[i].return_type_y = return_type_y;
760 ureg->sampler_view[i].return_type_z = return_type_z;
761 ureg->sampler_view[i].return_type_w = return_type_w;
762 ureg->nr_sampler_views++;
763 return reg;
764 }
765
766 assert(0);
767 return reg;
768 }
769
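/* Illustrative only: a sampler and its matching sampler view are usually
 * declared as a pair, using the same index.
 *
 *    struct ureg_src samp = ureg_DECL_sampler(ureg, 0);
 *    struct ureg_src view = ureg_DECL_sampler_view(ureg, 0,
 *                                                  TGSI_TEXTURE_2D,
 *                                                  TGSI_RETURN_TYPE_FLOAT,
 *                                                  TGSI_RETURN_TYPE_FLOAT,
 *                                                  TGSI_RETURN_TYPE_FLOAT,
 *                                                  TGSI_RETURN_TYPE_FLOAT);
 *
 * Asking for the same index again returns the existing register instead of
 * declaring a duplicate.
 */
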
770 /* Allocate a new image.
771 */
772 struct ureg_src
773 ureg_DECL_image(struct ureg_program *ureg,
774 unsigned index,
775 enum tgsi_texture_type target,
776 enum pipe_format format,
777 boolean wr,
778 boolean raw)
779 {
780 struct ureg_src reg = ureg_src_register(TGSI_FILE_IMAGE, index);
781 unsigned i;
782
783 for (i = 0; i < ureg->nr_images; i++)
784 if (ureg->image[i].index == index)
785 return reg;
786
787 if (i < PIPE_MAX_SHADER_IMAGES) {
788 ureg->image[i].index = index;
789 ureg->image[i].target = target;
790 ureg->image[i].wr = wr;
791 ureg->image[i].raw = raw;
792 ureg->image[i].format = format;
793 ureg->nr_images++;
794 return reg;
795 }
796
797 assert(0);
798 return reg;
799 }
800
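/* Illustrative only: declaring a writable, typed (non-raw) 2D image at
 * slot 0.
 *
 *    struct ureg_src img = ureg_DECL_image(ureg, 0, TGSI_TEXTURE_2D,
 *                                          PIPE_FORMAT_R32G32B32A32_FLOAT,
 *                                          TRUE, FALSE);
 *
 * The format ends up in the Format field of the image declaration token.
 */
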
801 /* Allocate a new buffer.
802 */
803 struct ureg_src ureg_DECL_buffer(struct ureg_program *ureg, unsigned nr,
804 bool atomic)
805 {
806 struct ureg_src reg = ureg_src_register(TGSI_FILE_BUFFER, nr);
807 unsigned i;
808
809 for (i = 0; i < ureg->nr_buffers; i++)
810 if (ureg->buffer[i].index == nr)
811 return reg;
812
813 if (i < PIPE_MAX_SHADER_BUFFERS) {
814 ureg->buffer[i].index = nr;
815 ureg->buffer[i].atomic = atomic;
816 ureg->nr_buffers++;
817 return reg;
818 }
819
820 assert(0);
821 return reg;
822 }
823
824 /* Allocate a memory area.
825 */
826 struct ureg_src ureg_DECL_memory(struct ureg_program *ureg,
827 unsigned memory_type)
828 {
829 struct ureg_src reg = ureg_src_register(TGSI_FILE_MEMORY, memory_type);
830
831 ureg->use_memory[memory_type] = true;
832 return reg;
833 }
834
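/* Illustrative only: a compute shader that wants to touch shared memory
 * declares it once and reuses the returned register for every load/store.
 *
 *    struct ureg_src shared = ureg_DECL_memory(ureg, TGSI_MEMORY_TYPE_SHARED);
 *
 * The register's Index is the memory type itself, which is also what
 * emit_decl_memory() writes into the declaration range.
 */
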
835 static int
836 match_or_expand_immediate64( const unsigned *v,
837 unsigned nr,
838 unsigned *v2,
839 unsigned *pnr2,
840 unsigned *swizzle )
841 {
842 unsigned nr2 = *pnr2;
843 unsigned i, j;
844 *swizzle = 0;
845
846 for (i = 0; i < nr; i += 2) {
847 boolean found = FALSE;
848
849 for (j = 0; j < nr2 && !found; j += 2) {
850 if (v[i] == v2[j] && v[i + 1] == v2[j + 1]) {
851 *swizzle |= (j << (i * 2)) | ((j + 1) << ((i + 1) * 2));
852 found = TRUE;
853 }
854 }
855 if (!found) {
856 if ((nr2) >= 4) {
857 return FALSE;
858 }
859
860 v2[nr2] = v[i];
861 v2[nr2 + 1] = v[i + 1];
862
863 *swizzle |= (nr2 << (i * 2)) | ((nr2 + 1) << ((i + 1) * 2));
864 nr2 += 2;
865 }
866 }
867
868 /* Only commit the expanded immediate size once matching fully succeeded.
869 */
870 *pnr2 = nr2;
871 return TRUE;
872 }
873
874 static int
875 match_or_expand_immediate( const unsigned *v,
876 int type,
877 unsigned nr,
878 unsigned *v2,
879 unsigned *pnr2,
880 unsigned *swizzle )
881 {
882 unsigned nr2 = *pnr2;
883 unsigned i, j;
884
885 if (type == TGSI_IMM_FLOAT64 ||
886 type == TGSI_IMM_UINT64 ||
887 type == TGSI_IMM_INT64)
888 return match_or_expand_immediate64(v, nr, v2, pnr2, swizzle);
889
890 *swizzle = 0;
891
892 for (i = 0; i < nr; i++) {
893 boolean found = FALSE;
894
895 for (j = 0; j < nr2 && !found; j++) {
896 if (v[i] == v2[j]) {
897 *swizzle |= j << (i * 2);
898 found = TRUE;
899 }
900 }
901
902 if (!found) {
903 if (nr2 >= 4) {
904 return FALSE;
905 }
906
907 v2[nr2] = v[i];
908 *swizzle |= nr2 << (i * 2);
909 nr2++;
910 }
911 }
912
913 /* Only commit the expanded immediate size once matching fully succeeded.
914 */
915 *pnr2 = nr2;
916 return TRUE;
917 }
918
919
920 static struct ureg_src
921 decl_immediate( struct ureg_program *ureg,
922 const unsigned *v,
923 unsigned nr,
924 unsigned type )
925 {
926 unsigned i, j;
927 unsigned swizzle = 0;
928
929 /* Could do a first pass where we examine all existing immediates
930 * without expanding.
931 */
932
933 for (i = 0; i < ureg->nr_immediates; i++) {
934 if (ureg->immediate[i].type != type) {
935 continue;
936 }
937 if (match_or_expand_immediate(v,
938 type,
939 nr,
940 ureg->immediate[i].value.u,
941 &ureg->immediate[i].nr,
942 &swizzle)) {
943 goto out;
944 }
945 }
946
947 if (ureg->nr_immediates < UREG_MAX_IMMEDIATE) {
948 i = ureg->nr_immediates++;
949 ureg->immediate[i].type = type;
950 if (match_or_expand_immediate(v,
951 type,
952 nr,
953 ureg->immediate[i].value.u,
954 &ureg->immediate[i].nr,
955 &swizzle)) {
956 goto out;
957 }
958 }
959
960 set_bad(ureg);
961
962 out:
963 /* Make sure that all referenced elements are from this immediate.
964 * Has the effect of making size-one immediates into scalars.
965 */
966 if (type == TGSI_IMM_FLOAT64 ||
967 type == TGSI_IMM_UINT64 ||
968 type == TGSI_IMM_INT64) {
969 for (j = nr; j < 4; j+=2) {
970 swizzle |= (swizzle & 0xf) << (j * 2);
971 }
972 } else {
973 for (j = nr; j < 4; j++) {
974 swizzle |= (swizzle & 0x3) << (j * 2);
975 }
976 }
977 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE, i),
978 (swizzle >> 0) & 0x3,
979 (swizzle >> 2) & 0x3,
980 (swizzle >> 4) & 0x3,
981 (swizzle >> 6) & 0x3);
982 }
983
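/* Worked example (illustrative only) of how immediates get shared.  Starting
 * from an empty immediate file:
 *
 *    ureg_DECL_immediate(ureg, (float[]){0.5f, 1.0f}, 2);
 *       -> immediate[0] holds {0.5, 1.0}; the result is IMM[0].xyxx
 *          (the trailing components are replicated from X by the loop above).
 *
 *    ureg_DECL_immediate(ureg, (float[]){1.0f, 2.0f}, 2);
 *       -> 1.0 is matched at component Y, 2.0 is appended at component Z,
 *          so immediate[0] now holds {0.5, 1.0, 2.0} and the result is
 *          IMM[0].yzyy -- no second immediate slot is used.
 */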
984
985 struct ureg_src
986 ureg_DECL_immediate( struct ureg_program *ureg,
987 const float *v,
988 unsigned nr )
989 {
990 union {
991 float f[4];
992 unsigned u[4];
993 } fu;
994 unsigned int i;
995
996 for (i = 0; i < nr; i++) {
997 fu.f[i] = v[i];
998 }
999
1000 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT32);
1001 }
1002
1003 struct ureg_src
1004 ureg_DECL_immediate_f64( struct ureg_program *ureg,
1005 const double *v,
1006 unsigned nr )
1007 {
1008 union {
1009 unsigned u[4];
1010 double d[2];
1011 } fu;
1012 unsigned int i;
1013
1014 assert((nr / 2) < 3);
1015 for (i = 0; i < nr / 2; i++) {
1016 fu.d[i] = v[i];
1017 }
1018
1019 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT64);
1020 }
1021
1022 struct ureg_src
1023 ureg_DECL_immediate_uint( struct ureg_program *ureg,
1024 const unsigned *v,
1025 unsigned nr )
1026 {
1027 return decl_immediate(ureg, v, nr, TGSI_IMM_UINT32);
1028 }
1029
1030
1031 struct ureg_src
1032 ureg_DECL_immediate_block_uint( struct ureg_program *ureg,
1033 const unsigned *v,
1034 unsigned nr )
1035 {
1036 uint index;
1037 uint i;
1038
1039 if (ureg->nr_immediates + (nr + 3) / 4 > UREG_MAX_IMMEDIATE) {
1040 set_bad(ureg);
1041 return ureg_src_register(TGSI_FILE_IMMEDIATE, 0);
1042 }
1043
1044 index = ureg->nr_immediates;
1045 ureg->nr_immediates += (nr + 3) / 4;
1046
1047 for (i = index; i < ureg->nr_immediates; i++) {
1048 ureg->immediate[i].type = TGSI_IMM_UINT32;
1049 ureg->immediate[i].nr = nr > 4 ? 4 : nr;
1050 memcpy(ureg->immediate[i].value.u,
1051 &v[(i - index) * 4],
1052 ureg->immediate[i].nr * sizeof(uint));
1053 nr -= 4;
1054 }
1055
1056 return ureg_src_register(TGSI_FILE_IMMEDIATE, index);
1057 }
1058
1059
1060 struct ureg_src
1061 ureg_DECL_immediate_int( struct ureg_program *ureg,
1062 const int *v,
1063 unsigned nr )
1064 {
1065 return decl_immediate(ureg, (const unsigned *)v, nr, TGSI_IMM_INT32);
1066 }
1067
1068 struct ureg_src
1069 ureg_DECL_immediate_uint64( struct ureg_program *ureg,
1070 const uint64_t *v,
1071 unsigned nr )
1072 {
1073 union {
1074 unsigned u[4];
1075 uint64_t u64[2];
1076 } fu;
1077 unsigned int i;
1078
1079 assert((nr / 2) < 3);
1080 for (i = 0; i < nr / 2; i++) {
1081 fu.u64[i] = v[i];
1082 }
1083
1084 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_UINT64);
1085 }
1086
1087 struct ureg_src
1088 ureg_DECL_immediate_int64( struct ureg_program *ureg,
1089 const int64_t *v,
1090 unsigned nr )
1091 {
1092 union {
1093 unsigned u[4];
1094 int64_t i64[2];
1095 } fu;
1096 unsigned int i;
1097
1098 assert((nr / 2) < 3);
1099 for (i = 0; i < nr / 2; i++) {
1100 fu.i64[i] = v[i];
1101 }
1102
1103 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_INT64);
1104 }
1105
1106 void
1107 ureg_emit_src( struct ureg_program *ureg,
1108 struct ureg_src src )
1109 {
1110 unsigned size = 1 + (src.Indirect ? 1 : 0) +
1111 (src.Dimension ? (src.DimIndirect ? 2 : 1) : 0);
1112
1113 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1114 unsigned n = 0;
1115
1116 assert(src.File != TGSI_FILE_NULL);
1117 assert(src.File < TGSI_FILE_COUNT);
1118
1119 out[n].value = 0;
1120 out[n].src.File = src.File;
1121 out[n].src.SwizzleX = src.SwizzleX;
1122 out[n].src.SwizzleY = src.SwizzleY;
1123 out[n].src.SwizzleZ = src.SwizzleZ;
1124 out[n].src.SwizzleW = src.SwizzleW;
1125 out[n].src.Index = src.Index;
1126 out[n].src.Negate = src.Negate;
1127 out[0].src.Absolute = src.Absolute;
1128 n++;
1129
1130 if (src.Indirect) {
1131 out[0].src.Indirect = 1;
1132 out[n].value = 0;
1133 out[n].ind.File = src.IndirectFile;
1134 out[n].ind.Swizzle = src.IndirectSwizzle;
1135 out[n].ind.Index = src.IndirectIndex;
1136 if (!ureg->supports_any_inout_decl_range &&
1137 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1138 out[n].ind.ArrayID = 0;
1139 else
1140 out[n].ind.ArrayID = src.ArrayID;
1141 n++;
1142 }
1143
1144 if (src.Dimension) {
1145 out[0].src.Dimension = 1;
1146 out[n].dim.Dimension = 0;
1147 out[n].dim.Padding = 0;
1148 if (src.DimIndirect) {
1149 out[n].dim.Indirect = 1;
1150 out[n].dim.Index = src.DimensionIndex;
1151 n++;
1152 out[n].value = 0;
1153 out[n].ind.File = src.DimIndFile;
1154 out[n].ind.Swizzle = src.DimIndSwizzle;
1155 out[n].ind.Index = src.DimIndIndex;
1156 if (!ureg->supports_any_inout_decl_range &&
1157 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1158 out[n].ind.ArrayID = 0;
1159 else
1160 out[n].ind.ArrayID = src.ArrayID;
1161 } else {
1162 out[n].dim.Indirect = 0;
1163 out[n].dim.Index = src.DimensionIndex;
1164 }
1165 n++;
1166 }
1167
1168 assert(n == size);
1169 }
1170
1171
1172 void
1173 ureg_emit_dst( struct ureg_program *ureg,
1174 struct ureg_dst dst )
1175 {
1176 unsigned size = 1 + (dst.Indirect ? 1 : 0) +
1177 (dst.Dimension ? (dst.DimIndirect ? 2 : 1) : 0);
1178
1179 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1180 unsigned n = 0;
1181
1182 assert(dst.File != TGSI_FILE_NULL);
1183 assert(dst.File != TGSI_FILE_SAMPLER);
1184 assert(dst.File != TGSI_FILE_SAMPLER_VIEW);
1185 assert(dst.File != TGSI_FILE_IMMEDIATE);
1186 assert(dst.File < TGSI_FILE_COUNT);
1187
1188 out[n].value = 0;
1189 out[n].dst.File = dst.File;
1190 out[n].dst.WriteMask = dst.WriteMask;
1191 out[n].dst.Indirect = dst.Indirect;
1192 out[n].dst.Index = dst.Index;
1193 n++;
1194
1195 if (dst.Indirect) {
1196 out[n].value = 0;
1197 out[n].ind.File = dst.IndirectFile;
1198 out[n].ind.Swizzle = dst.IndirectSwizzle;
1199 out[n].ind.Index = dst.IndirectIndex;
1200 if (!ureg->supports_any_inout_decl_range &&
1201 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1202 out[n].ind.ArrayID = 0;
1203 else
1204 out[n].ind.ArrayID = dst.ArrayID;
1205 n++;
1206 }
1207
1208 if (dst.Dimension) {
1209 out[0].dst.Dimension = 1;
1210 out[n].dim.Dimension = 0;
1211 out[n].dim.Padding = 0;
1212 if (dst.DimIndirect) {
1213 out[n].dim.Indirect = 1;
1214 out[n].dim.Index = dst.DimensionIndex;
1215 n++;
1216 out[n].value = 0;
1217 out[n].ind.File = dst.DimIndFile;
1218 out[n].ind.Swizzle = dst.DimIndSwizzle;
1219 out[n].ind.Index = dst.DimIndIndex;
1220 if (!ureg->supports_any_inout_decl_range &&
1221 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1222 out[n].ind.ArrayID = 0;
1223 else
1224 out[n].ind.ArrayID = dst.ArrayID;
1225 } else {
1226 out[n].dim.Indirect = 0;
1227 out[n].dim.Index = dst.DimensionIndex;
1228 }
1229 n++;
1230 }
1231
1232 assert(n == size);
1233 }
1234
1235
1236 static void validate( enum tgsi_opcode opcode,
1237 unsigned nr_dst,
1238 unsigned nr_src )
1239 {
1240 #ifdef DEBUG
1241 const struct tgsi_opcode_info *info = tgsi_get_opcode_info( opcode );
1242 assert(info);
1243 if (info) {
1244 assert(nr_dst == info->num_dst);
1245 assert(nr_src == info->num_src);
1246 }
1247 #endif
1248 }
1249
1250 struct ureg_emit_insn_result
1251 ureg_emit_insn(struct ureg_program *ureg,
1252 enum tgsi_opcode opcode,
1253 boolean saturate,
1254 unsigned precise,
1255 unsigned num_dst,
1256 unsigned num_src)
1257 {
1258 union tgsi_any_token *out;
1259 uint count = 1;
1260 struct ureg_emit_insn_result result;
1261
1262 validate( opcode, num_dst, num_src );
1263
1264 out = get_tokens( ureg, DOMAIN_INSN, count );
1265 out[0].insn = tgsi_default_instruction();
1266 out[0].insn.Opcode = opcode;
1267 out[0].insn.Saturate = saturate;
1268 out[0].insn.Precise = precise;
1269 out[0].insn.NumDstRegs = num_dst;
1270 out[0].insn.NumSrcRegs = num_src;
1271
1272 result.insn_token = ureg->domain[DOMAIN_INSN].count - count;
1273 result.extended_token = result.insn_token;
1274
1275 ureg->nr_instructions++;
1276
1277 return result;
1278 }
1279
1280
1281 /**
1282 * Emit a label token.
1283 * \param label_token returns the token number at which the label will need
1284 * to be patched.  Later, once the target instruction is known, pass this
1285 * value to ureg_fixup_label().
1286 */
1287 void
1288 ureg_emit_label(struct ureg_program *ureg,
1289 unsigned extended_token,
1290 unsigned *label_token )
1291 {
1292 union tgsi_any_token *out, *insn;
1293
1294 if (!label_token)
1295 return;
1296
1297 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1298 out[0].value = 0;
1299
1300 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1301 insn->insn.Label = 1;
1302
1303 *label_token = ureg->domain[DOMAIN_INSN].count - 1;
1304 }
1305
1306 /* Returns a number which can be used in a label to point to the
1307 * next instruction to be emitted.
1308 */
1309 unsigned
1310 ureg_get_instruction_number( struct ureg_program *ureg )
1311 {
1312 return ureg->nr_instructions;
1313 }
1314
1315 /* Patch a given label (expressed as a token number) to point to a
1316 * given instruction (expressed as an instruction number).
1317 */
1318 void
1319 ureg_fixup_label(struct ureg_program *ureg,
1320 unsigned label_token,
1321 unsigned instruction_number )
1322 {
1323 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, label_token );
1324
1325 out->insn_label.Label = instruction_number;
1326 }
1327
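/* Illustrative only: the usual control-flow pattern built on top of the two
 * helpers above, given some ureg_src cond.  ureg_IF/ureg_ELSE/ureg_ENDIF are
 * opcode helpers assumed from tgsi_ureg.h; each takes a label token that is
 * patched once the matching jump target is known.
 *
 *    unsigned label;
 *
 *    ureg_IF(ureg, cond, &label);
 *       ... then-side instructions ...
 *    ureg_fixup_label(ureg, label, ureg_get_instruction_number(ureg));
 *    ureg_ELSE(ureg, &label);
 *       ... else-side instructions ...
 *    ureg_fixup_label(ureg, label, ureg_get_instruction_number(ureg));
 *    ureg_ENDIF(ureg);
 */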
1328
1329 void
1330 ureg_emit_texture(struct ureg_program *ureg,
1331 unsigned extended_token,
1332 enum tgsi_texture_type target,
1333 enum tgsi_return_type return_type, unsigned num_offsets)
1334 {
1335 union tgsi_any_token *out, *insn;
1336
1337 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1338 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1339
1340 insn->insn.Texture = 1;
1341
1342 out[0].value = 0;
1343 out[0].insn_texture.Texture = target;
1344 out[0].insn_texture.NumOffsets = num_offsets;
1345 out[0].insn_texture.ReturnType = return_type;
1346 }
1347
1348 void
1349 ureg_emit_texture_offset(struct ureg_program *ureg,
1350 const struct tgsi_texture_offset *offset)
1351 {
1352 union tgsi_any_token *out;
1353
1354 out = get_tokens( ureg, DOMAIN_INSN, 1);
1355
1356 out[0].value = 0;
1357 out[0].insn_texture_offset = *offset;
1358
1359 }
1360
1361 void
1362 ureg_emit_memory(struct ureg_program *ureg,
1363 unsigned extended_token,
1364 unsigned qualifier,
1365 enum tgsi_texture_type texture,
1366 enum pipe_format format)
1367 {
1368 union tgsi_any_token *out, *insn;
1369
1370 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1371 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1372
1373 insn->insn.Memory = 1;
1374
1375 out[0].value = 0;
1376 out[0].insn_memory.Qualifier = qualifier;
1377 out[0].insn_memory.Texture = texture;
1378 out[0].insn_memory.Format = format;
1379 }
1380
1381 void
1382 ureg_fixup_insn_size(struct ureg_program *ureg,
1383 unsigned insn )
1384 {
1385 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, insn );
1386
1387 assert(out->insn.Type == TGSI_TOKEN_TYPE_INSTRUCTION);
1388 out->insn.NrTokens = ureg->domain[DOMAIN_INSN].count - insn - 1;
1389 }
1390
1391
1392 void
1393 ureg_insn(struct ureg_program *ureg,
1394 enum tgsi_opcode opcode,
1395 const struct ureg_dst *dst,
1396 unsigned nr_dst,
1397 const struct ureg_src *src,
1398 unsigned nr_src,
1399 unsigned precise )
1400 {
1401 struct ureg_emit_insn_result insn;
1402 unsigned i;
1403 boolean saturate;
1404
1405 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1406 return;
1407 }
1408
1409 saturate = nr_dst ? dst[0].Saturate : FALSE;
1410
1411 insn = ureg_emit_insn(ureg,
1412 opcode,
1413 saturate,
1414 precise,
1415 nr_dst,
1416 nr_src);
1417
1418 for (i = 0; i < nr_dst; i++)
1419 ureg_emit_dst( ureg, dst[i] );
1420
1421 for (i = 0; i < nr_src; i++)
1422 ureg_emit_src( ureg, src[i] );
1423
1424 ureg_fixup_insn_size( ureg, insn.insn_token );
1425 }
1426
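/* Illustrative only: ureg_insn() is the generic path used by the per-opcode
 * helpers in tgsi_ureg.h.  Emitting an ADD by hand, given a ureg_dst tmp and
 * ureg_src values a and b, looks like this:
 *
 *    struct ureg_dst dsts[1] = { tmp };
 *    struct ureg_src srcs[2] = { a, b };
 *
 *    ureg_insn(ureg, TGSI_OPCODE_ADD, dsts, 1, srcs, 2, 0);
 *
 * Saturate is taken from dst[0], and the whole instruction is silently
 * dropped when dst[0] is empty (see ureg_dst_is_empty()).
 */
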
1427 void
1428 ureg_tex_insn(struct ureg_program *ureg,
1429 enum tgsi_opcode opcode,
1430 const struct ureg_dst *dst,
1431 unsigned nr_dst,
1432 enum tgsi_texture_type target,
1433 enum tgsi_return_type return_type,
1434 const struct tgsi_texture_offset *texoffsets,
1435 unsigned nr_offset,
1436 const struct ureg_src *src,
1437 unsigned nr_src )
1438 {
1439 struct ureg_emit_insn_result insn;
1440 unsigned i;
1441 boolean saturate;
1442
1443 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1444 return;
1445 }
1446
1447 saturate = nr_dst ? dst[0].Saturate : FALSE;
1448
1449 insn = ureg_emit_insn(ureg,
1450 opcode,
1451 saturate,
1452 0,
1453 nr_dst,
1454 nr_src);
1455
1456 ureg_emit_texture( ureg, insn.extended_token, target, return_type,
1457 nr_offset );
1458
1459 for (i = 0; i < nr_offset; i++)
1460 ureg_emit_texture_offset( ureg, &texoffsets[i]);
1461
1462 for (i = 0; i < nr_dst; i++)
1463 ureg_emit_dst( ureg, dst[i] );
1464
1465 for (i = 0; i < nr_src; i++)
1466 ureg_emit_src( ureg, src[i] );
1467
1468 ureg_fixup_insn_size( ureg, insn.insn_token );
1469 }
1470
1471
1472 void
1473 ureg_memory_insn(struct ureg_program *ureg,
1474 enum tgsi_opcode opcode,
1475 const struct ureg_dst *dst,
1476 unsigned nr_dst,
1477 const struct ureg_src *src,
1478 unsigned nr_src,
1479 unsigned qualifier,
1480 enum tgsi_texture_type texture,
1481 enum pipe_format format)
1482 {
1483 struct ureg_emit_insn_result insn;
1484 unsigned i;
1485
1486 insn = ureg_emit_insn(ureg,
1487 opcode,
1488 FALSE,
1489 0,
1490 nr_dst,
1491 nr_src);
1492
1493 ureg_emit_memory(ureg, insn.extended_token, qualifier, texture, format);
1494
1495 for (i = 0; i < nr_dst; i++)
1496 ureg_emit_dst(ureg, dst[i]);
1497
1498 for (i = 0; i < nr_src; i++)
1499 ureg_emit_src(ureg, src[i]);
1500
1501 ureg_fixup_insn_size(ureg, insn.insn_token);
1502 }
1503
1504
1505 static void
1506 emit_decl_semantic(struct ureg_program *ureg,
1507 unsigned file,
1508 unsigned first,
1509 unsigned last,
1510 enum tgsi_semantic semantic_name,
1511 unsigned semantic_index,
1512 unsigned streams,
1513 unsigned usage_mask,
1514 unsigned array_id)
1515 {
1516 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1517
1518 out[0].value = 0;
1519 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1520 out[0].decl.NrTokens = 3;
1521 out[0].decl.File = file;
1522 out[0].decl.UsageMask = usage_mask;
1523 out[0].decl.Semantic = 1;
1524 out[0].decl.Array = array_id != 0;
1525
1526 out[1].value = 0;
1527 out[1].decl_range.First = first;
1528 out[1].decl_range.Last = last;
1529
1530 out[2].value = 0;
1531 out[2].decl_semantic.Name = semantic_name;
1532 out[2].decl_semantic.Index = semantic_index;
1533 out[2].decl_semantic.StreamX = streams & 3;
1534 out[2].decl_semantic.StreamY = (streams >> 2) & 3;
1535 out[2].decl_semantic.StreamZ = (streams >> 4) & 3;
1536 out[2].decl_semantic.StreamW = (streams >> 6) & 3;
1537
1538 if (array_id) {
1539 out[3].value = 0;
1540 out[3].array.ArrayID = array_id;
1541 }
1542 }
1543
1544 static void
1545 emit_decl_atomic_2d(struct ureg_program *ureg,
1546 unsigned first,
1547 unsigned last,
1548 unsigned index2D,
1549 unsigned array_id)
1550 {
1551 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1552
1553 out[0].value = 0;
1554 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1555 out[0].decl.NrTokens = 3;
1556 out[0].decl.File = TGSI_FILE_HW_ATOMIC;
1557 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1558 out[0].decl.Dimension = 1;
1559 out[0].decl.Array = array_id != 0;
1560
1561 out[1].value = 0;
1562 out[1].decl_range.First = first;
1563 out[1].decl_range.Last = last;
1564
1565 out[2].value = 0;
1566 out[2].decl_dim.Index2D = index2D;
1567
1568 if (array_id) {
1569 out[3].value = 0;
1570 out[3].array.ArrayID = array_id;
1571 }
1572 }
1573
1574 static void
1575 emit_decl_fs(struct ureg_program *ureg,
1576 unsigned file,
1577 unsigned first,
1578 unsigned last,
1579 enum tgsi_semantic semantic_name,
1580 unsigned semantic_index,
1581 enum tgsi_interpolate_mode interpolate,
1582 unsigned cylindrical_wrap,
1583 enum tgsi_interpolate_loc interpolate_location,
1584 unsigned array_id,
1585 unsigned usage_mask)
1586 {
1587 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL,
1588 array_id ? 5 : 4);
1589
1590 out[0].value = 0;
1591 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1592 out[0].decl.NrTokens = 4;
1593 out[0].decl.File = file;
1594 out[0].decl.UsageMask = usage_mask;
1595 out[0].decl.Interpolate = 1;
1596 out[0].decl.Semantic = 1;
1597 out[0].decl.Array = array_id != 0;
1598
1599 out[1].value = 0;
1600 out[1].decl_range.First = first;
1601 out[1].decl_range.Last = last;
1602
1603 out[2].value = 0;
1604 out[2].decl_interp.Interpolate = interpolate;
1605 out[2].decl_interp.CylindricalWrap = cylindrical_wrap;
1606 out[2].decl_interp.Location = interpolate_location;
1607
1608 out[3].value = 0;
1609 out[3].decl_semantic.Name = semantic_name;
1610 out[3].decl_semantic.Index = semantic_index;
1611
1612 if (array_id) {
1613 out[4].value = 0;
1614 out[4].array.ArrayID = array_id;
1615 }
1616 }
1617
1618 static void
1619 emit_decl_temps( struct ureg_program *ureg,
1620 unsigned first, unsigned last,
1621 boolean local,
1622 unsigned arrayid )
1623 {
1624 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL,
1625 arrayid ? 3 : 2 );
1626
1627 out[0].value = 0;
1628 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1629 out[0].decl.NrTokens = 2;
1630 out[0].decl.File = TGSI_FILE_TEMPORARY;
1631 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1632 out[0].decl.Local = local;
1633
1634 out[1].value = 0;
1635 out[1].decl_range.First = first;
1636 out[1].decl_range.Last = last;
1637
1638 if (arrayid) {
1639 out[0].decl.Array = 1;
1640 out[2].value = 0;
1641 out[2].array.ArrayID = arrayid;
1642 }
1643 }
1644
1645 static void emit_decl_range( struct ureg_program *ureg,
1646 unsigned file,
1647 unsigned first,
1648 unsigned count )
1649 {
1650 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
1651
1652 out[0].value = 0;
1653 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1654 out[0].decl.NrTokens = 2;
1655 out[0].decl.File = file;
1656 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1657 out[0].decl.Semantic = 0;
1658
1659 out[1].value = 0;
1660 out[1].decl_range.First = first;
1661 out[1].decl_range.Last = first + count - 1;
1662 }
1663
1664 static void
1665 emit_decl_range2D(struct ureg_program *ureg,
1666 unsigned file,
1667 unsigned first,
1668 unsigned last,
1669 unsigned index2D)
1670 {
1671 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1672
1673 out[0].value = 0;
1674 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1675 out[0].decl.NrTokens = 3;
1676 out[0].decl.File = file;
1677 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1678 out[0].decl.Dimension = 1;
1679
1680 out[1].value = 0;
1681 out[1].decl_range.First = first;
1682 out[1].decl_range.Last = last;
1683
1684 out[2].value = 0;
1685 out[2].decl_dim.Index2D = index2D;
1686 }
1687
1688 static void
1689 emit_decl_sampler_view(struct ureg_program *ureg,
1690 unsigned index,
1691 enum tgsi_texture_type target,
1692 enum tgsi_return_type return_type_x,
1693 enum tgsi_return_type return_type_y,
1694 enum tgsi_return_type return_type_z,
1695 enum tgsi_return_type return_type_w )
1696 {
1697 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1698
1699 out[0].value = 0;
1700 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1701 out[0].decl.NrTokens = 3;
1702 out[0].decl.File = TGSI_FILE_SAMPLER_VIEW;
1703 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1704
1705 out[1].value = 0;
1706 out[1].decl_range.First = index;
1707 out[1].decl_range.Last = index;
1708
1709 out[2].value = 0;
1710 out[2].decl_sampler_view.Resource = target;
1711 out[2].decl_sampler_view.ReturnTypeX = return_type_x;
1712 out[2].decl_sampler_view.ReturnTypeY = return_type_y;
1713 out[2].decl_sampler_view.ReturnTypeZ = return_type_z;
1714 out[2].decl_sampler_view.ReturnTypeW = return_type_w;
1715 }
1716
1717 static void
1718 emit_decl_image(struct ureg_program *ureg,
1719 unsigned index,
1720 enum tgsi_texture_type target,
1721 enum pipe_format format,
1722 boolean wr,
1723 boolean raw)
1724 {
1725 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1726
1727 out[0].value = 0;
1728 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1729 out[0].decl.NrTokens = 3;
1730 out[0].decl.File = TGSI_FILE_IMAGE;
1731 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1732
1733 out[1].value = 0;
1734 out[1].decl_range.First = index;
1735 out[1].decl_range.Last = index;
1736
1737 out[2].value = 0;
1738 out[2].decl_image.Resource = target;
1739 out[2].decl_image.Writable = wr;
1740 out[2].decl_image.Raw = raw;
1741 out[2].decl_image.Format = format;
1742 }
1743
1744 static void
1745 emit_decl_buffer(struct ureg_program *ureg,
1746 unsigned index,
1747 bool atomic)
1748 {
1749 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1750
1751 out[0].value = 0;
1752 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1753 out[0].decl.NrTokens = 2;
1754 out[0].decl.File = TGSI_FILE_BUFFER;
1755 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1756 out[0].decl.Atomic = atomic;
1757
1758 out[1].value = 0;
1759 out[1].decl_range.First = index;
1760 out[1].decl_range.Last = index;
1761 }
1762
1763 static void
1764 emit_decl_memory(struct ureg_program *ureg, unsigned memory_type)
1765 {
1766 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1767
1768 out[0].value = 0;
1769 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1770 out[0].decl.NrTokens = 2;
1771 out[0].decl.File = TGSI_FILE_MEMORY;
1772 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1773 out[0].decl.MemType = memory_type;
1774
1775 out[1].value = 0;
1776 out[1].decl_range.First = memory_type;
1777 out[1].decl_range.Last = memory_type;
1778 }
1779
1780 static void
1781 emit_immediate( struct ureg_program *ureg,
1782 const unsigned *v,
1783 unsigned type )
1784 {
1785 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 5 );
1786
1787 out[0].value = 0;
1788 out[0].imm.Type = TGSI_TOKEN_TYPE_IMMEDIATE;
1789 out[0].imm.NrTokens = 5;
1790 out[0].imm.DataType = type;
1791 out[0].imm.Padding = 0;
1792
1793 out[1].imm_data.Uint = v[0];
1794 out[2].imm_data.Uint = v[1];
1795 out[3].imm_data.Uint = v[2];
1796 out[4].imm_data.Uint = v[3];
1797 }
1798
1799 static void
1800 emit_property(struct ureg_program *ureg,
1801 unsigned name,
1802 unsigned data)
1803 {
1804 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1805
1806 out[0].value = 0;
1807 out[0].prop.Type = TGSI_TOKEN_TYPE_PROPERTY;
1808 out[0].prop.NrTokens = 2;
1809 out[0].prop.PropertyName = name;
1810
1811 out[1].prop_data.Data = data;
1812 }
1813
1814
1815 static void emit_decls( struct ureg_program *ureg )
1816 {
1817 unsigned i,j;
1818
1819 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
1820 if (ureg->properties[i] != ~0u)
1821 emit_property(ureg, i, ureg->properties[i]);
1822
1823 if (ureg->processor == PIPE_SHADER_VERTEX) {
1824 for (i = 0; i < PIPE_MAX_ATTRIBS; i++) {
1825 if (ureg->vs_inputs[i/32] & (1u << (i%32))) {
1826 emit_decl_range( ureg, TGSI_FILE_INPUT, i, 1 );
1827 }
1828 }
1829 } else if (ureg->processor == PIPE_SHADER_FRAGMENT) {
1830 if (ureg->supports_any_inout_decl_range) {
1831 for (i = 0; i < ureg->nr_inputs; i++) {
1832 emit_decl_fs(ureg,
1833 TGSI_FILE_INPUT,
1834 ureg->input[i].first,
1835 ureg->input[i].last,
1836 ureg->input[i].semantic_name,
1837 ureg->input[i].semantic_index,
1838 ureg->input[i].interp,
1839 ureg->input[i].cylindrical_wrap,
1840 ureg->input[i].interp_location,
1841 ureg->input[i].array_id,
1842 ureg->input[i].usage_mask);
1843 }
1844 }
1845 else {
1846 for (i = 0; i < ureg->nr_inputs; i++) {
1847 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1848 emit_decl_fs(ureg,
1849 TGSI_FILE_INPUT,
1850 j, j,
1851 ureg->input[i].semantic_name,
1852 ureg->input[i].semantic_index +
1853 (j - ureg->input[i].first),
1854 ureg->input[i].interp,
1855 ureg->input[i].cylindrical_wrap,
1856 ureg->input[i].interp_location, 0,
1857 ureg->input[i].usage_mask);
1858 }
1859 }
1860 }
1861 } else {
1862 if (ureg->supports_any_inout_decl_range) {
1863 for (i = 0; i < ureg->nr_inputs; i++) {
1864 emit_decl_semantic(ureg,
1865 TGSI_FILE_INPUT,
1866 ureg->input[i].first,
1867 ureg->input[i].last,
1868 ureg->input[i].semantic_name,
1869 ureg->input[i].semantic_index,
1870 0,
1871 TGSI_WRITEMASK_XYZW,
1872 ureg->input[i].array_id);
1873 }
1874 }
1875 else {
1876 for (i = 0; i < ureg->nr_inputs; i++) {
1877 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1878 emit_decl_semantic(ureg,
1879 TGSI_FILE_INPUT,
1880 j, j,
1881 ureg->input[i].semantic_name,
1882 ureg->input[i].semantic_index +
1883 (j - ureg->input[i].first),
1884 0,
1885 TGSI_WRITEMASK_XYZW, 0);
1886 }
1887 }
1888 }
1889 }
1890
1891 for (i = 0; i < ureg->nr_system_values; i++) {
1892 emit_decl_semantic(ureg,
1893 TGSI_FILE_SYSTEM_VALUE,
1894 i,
1895 i,
1896 ureg->system_value[i].semantic_name,
1897 ureg->system_value[i].semantic_index,
1898 0,
1899 TGSI_WRITEMASK_XYZW, 0);
1900 }
1901
1902 if (ureg->supports_any_inout_decl_range) {
1903 for (i = 0; i < ureg->nr_outputs; i++) {
1904 emit_decl_semantic(ureg,
1905 TGSI_FILE_OUTPUT,
1906 ureg->output[i].first,
1907 ureg->output[i].last,
1908 ureg->output[i].semantic_name,
1909 ureg->output[i].semantic_index,
1910 ureg->output[i].streams,
1911 ureg->output[i].usage_mask,
1912 ureg->output[i].array_id);
1913 }
1914 }
1915 else {
1916 for (i = 0; i < ureg->nr_outputs; i++) {
1917 for (j = ureg->output[i].first; j <= ureg->output[i].last; j++) {
1918 emit_decl_semantic(ureg,
1919 TGSI_FILE_OUTPUT,
1920 j, j,
1921 ureg->output[i].semantic_name,
1922 ureg->output[i].semantic_index +
1923 (j - ureg->output[i].first),
1924 ureg->output[i].streams,
1925 ureg->output[i].usage_mask, 0);
1926 }
1927 }
1928 }
1929
1930 for (i = 0; i < ureg->nr_samplers; i++) {
1931 emit_decl_range( ureg,
1932 TGSI_FILE_SAMPLER,
1933 ureg->sampler[i].Index, 1 );
1934 }
1935
1936 for (i = 0; i < ureg->nr_sampler_views; i++) {
1937 emit_decl_sampler_view(ureg,
1938 ureg->sampler_view[i].index,
1939 ureg->sampler_view[i].target,
1940 ureg->sampler_view[i].return_type_x,
1941 ureg->sampler_view[i].return_type_y,
1942 ureg->sampler_view[i].return_type_z,
1943 ureg->sampler_view[i].return_type_w);
1944 }
1945
1946 for (i = 0; i < ureg->nr_images; i++) {
1947 emit_decl_image(ureg,
1948 ureg->image[i].index,
1949 ureg->image[i].target,
1950 ureg->image[i].format,
1951 ureg->image[i].wr,
1952 ureg->image[i].raw);
1953 }
1954
1955 for (i = 0; i < ureg->nr_buffers; i++) {
1956 emit_decl_buffer(ureg, ureg->buffer[i].index, ureg->buffer[i].atomic);
1957 }
1958
1959 for (i = 0; i < TGSI_MEMORY_TYPE_COUNT; i++) {
1960 if (ureg->use_memory[i])
1961 emit_decl_memory(ureg, i);
1962 }
1963
1964 for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
1965 struct const_decl *decl = &ureg->const_decls[i];
1966
1967 if (decl->nr_constant_ranges) {
1968 uint j;
1969
1970 for (j = 0; j < decl->nr_constant_ranges; j++) {
1971 emit_decl_range2D(ureg,
1972 TGSI_FILE_CONSTANT,
1973 decl->constant_range[j].first,
1974 decl->constant_range[j].last,
1975 i);
1976 }
1977 }
1978 }
1979
1980 for (i = 0; i < PIPE_MAX_HW_ATOMIC_BUFFERS; i++) {
1981 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[i];
1982
1983 if (decl->nr_hw_atomic_ranges) {
1984 uint j;
1985
1986 for (j = 0; j < decl->nr_hw_atomic_ranges; j++) {
1987 emit_decl_atomic_2d(ureg,
1988 decl->hw_atomic_range[j].first,
1989 decl->hw_atomic_range[j].last,
1990 i,
1991 decl->hw_atomic_range[j].array_id);
1992 }
1993 }
1994 }
1995
1996 if (ureg->nr_temps) {
1997 unsigned array = 0;
1998 for (i = 0; i < ureg->nr_temps;) {
1999 boolean local = util_bitmask_get(ureg->local_temps, i);
2000 unsigned first = i;
2001 i = util_bitmask_get_next_index(ureg->decl_temps, i + 1);
2002 if (i == UTIL_BITMASK_INVALID_INDEX)
2003 i = ureg->nr_temps;
2004
2005 if (array < ureg->nr_array_temps && ureg->array_temps[array] == first)
2006 emit_decl_temps( ureg, first, i - 1, local, ++array );
2007 else
2008 emit_decl_temps( ureg, first, i - 1, local, 0 );
2009 }
2010 }
2011
2012 if (ureg->nr_addrs) {
2013 emit_decl_range( ureg,
2014 TGSI_FILE_ADDRESS,
2015 0, ureg->nr_addrs );
2016 }
2017
2018 for (i = 0; i < ureg->nr_immediates; i++) {
2019 emit_immediate( ureg,
2020 ureg->immediate[i].value.u,
2021 ureg->immediate[i].type );
2022 }
2023 }
2024
2025 /* Append the instruction tokens onto the declarations to build a
2026 * contiguous stream suitable to send to the driver.
2027 */
2028 static void copy_instructions( struct ureg_program *ureg )
2029 {
2030 unsigned nr_tokens = ureg->domain[DOMAIN_INSN].count;
2031 union tgsi_any_token *out = get_tokens( ureg,
2032 DOMAIN_DECL,
2033 nr_tokens );
2034
2035 memcpy(out,
2036 ureg->domain[DOMAIN_INSN].tokens,
2037 nr_tokens * sizeof out[0] );
2038 }
2039
2040
2041 static void
2042 fixup_header_size(struct ureg_program *ureg)
2043 {
2044 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_DECL, 0 );
2045
2046 out->header.BodySize = ureg->domain[DOMAIN_DECL].count - 2;
2047 }
2048
2049
2050 static void
2051 emit_header( struct ureg_program *ureg )
2052 {
2053 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
2054
2055 out[0].header.HeaderSize = 2;
2056 out[0].header.BodySize = 0;
2057
2058 out[1].processor.Processor = ureg->processor;
2059 out[1].processor.Padding = 0;
2060 }
2061
2062
2063 const struct tgsi_token *ureg_finalize( struct ureg_program *ureg )
2064 {
2065 const struct tgsi_token *tokens;
2066
2067 switch (ureg->processor) {
2068 case PIPE_SHADER_VERTEX:
2069 case PIPE_SHADER_TESS_EVAL:
2070 ureg_property(ureg, TGSI_PROPERTY_NEXT_SHADER,
2071 ureg->next_shader_processor == -1 ?
2072 PIPE_SHADER_FRAGMENT :
2073 ureg->next_shader_processor);
2074 break;
2075 default:
2076 ; /* nothing */
2077 }
2078
2079 emit_header( ureg );
2080 emit_decls( ureg );
2081 copy_instructions( ureg );
2082 fixup_header_size( ureg );
2083
2084 if (ureg->domain[0].tokens == error_tokens ||
2085 ureg->domain[1].tokens == error_tokens) {
2086 debug_printf("%s: error in generated shader\n", __FUNCTION__);
2087 assert(0);
2088 return NULL;
2089 }
2090
2091 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2092
2093 if (0) {
2094 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__,
2095 ureg->domain[DOMAIN_DECL].count);
2096 tgsi_dump( tokens, 0 );
2097 }
2098
2099 #if DEBUG
2100 if (tokens && !tgsi_sanity_check(tokens)) {
2101 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2102 tgsi_dump(tokens, 0);
2103 assert(0);
2104 }
2105 #endif
2106
2107
2108 return tokens;
2109 }
2110
2111
2112 void *ureg_create_shader( struct ureg_program *ureg,
2113 struct pipe_context *pipe,
2114 const struct pipe_stream_output_info *so )
2115 {
2116 struct pipe_shader_state state;
2117
2118 pipe_shader_state_from_tgsi(&state, ureg_finalize(ureg));
2119 if(!state.tokens)
2120 return NULL;
2121
2122 if (so)
2123 state.stream_output = *so;
2124
2125 switch (ureg->processor) {
2126 case PIPE_SHADER_VERTEX:
2127 return pipe->create_vs_state(pipe, &state);
2128 case PIPE_SHADER_TESS_CTRL:
2129 return pipe->create_tcs_state(pipe, &state);
2130 case PIPE_SHADER_TESS_EVAL:
2131 return pipe->create_tes_state(pipe, &state);
2132 case PIPE_SHADER_GEOMETRY:
2133 return pipe->create_gs_state(pipe, &state);
2134 case PIPE_SHADER_FRAGMENT:
2135 return pipe->create_fs_state(pipe, &state);
2136 default:
2137 return NULL;
2138 }
2139 }
2140
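/* Illustrative only: the typical end-to-end flow through this file, from
 * ureg_create() to a pipe shader CSO, given an existing struct pipe_context
 * *pipe.  ureg_DECL_fs_input, ureg_MOV and ureg_END are helpers assumed from
 * tgsi_ureg.h; error handling is omitted.
 *
 *    struct ureg_program *ureg = ureg_create(PIPE_SHADER_FRAGMENT);
 *
 *    struct ureg_src in  = ureg_DECL_fs_input(ureg, TGSI_SEMANTIC_COLOR, 0,
 *                                             TGSI_INTERPOLATE_PERSPECTIVE);
 *    struct ureg_dst out = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *
 *    ureg_MOV(ureg, out, in);
 *    ureg_END(ureg);
 *
 *    void *fs = ureg_create_shader(ureg, pipe, NULL);
 *    ureg_destroy(ureg);
 */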
2141
2142 const struct tgsi_token *ureg_get_tokens( struct ureg_program *ureg,
2143 unsigned *nr_tokens )
2144 {
2145 const struct tgsi_token *tokens;
2146
2147 ureg_finalize(ureg);
2148
2149 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2150
2151 if (nr_tokens)
2152 *nr_tokens = ureg->domain[DOMAIN_DECL].count;
2153
2154 ureg->domain[DOMAIN_DECL].tokens = 0;
2155 ureg->domain[DOMAIN_DECL].size = 0;
2156 ureg->domain[DOMAIN_DECL].order = 0;
2157 ureg->domain[DOMAIN_DECL].count = 0;
2158
2159 return tokens;
2160 }
2161
2162
2163 void ureg_free_tokens( const struct tgsi_token *tokens )
2164 {
2165 FREE((struct tgsi_token *)tokens);
2166 }
2167
2168
2169 struct ureg_program *
2170 ureg_create(enum pipe_shader_type processor)
2171 {
2172 return ureg_create_with_screen(processor, NULL);
2173 }
2174
2175
2176 struct ureg_program *
2177 ureg_create_with_screen(enum pipe_shader_type processor,
2178 struct pipe_screen *screen)
2179 {
2180 uint i;
2181 struct ureg_program *ureg = CALLOC_STRUCT( ureg_program );
2182 if (!ureg)
2183 goto no_ureg;
2184
2185 ureg->processor = processor;
2186 ureg->supports_any_inout_decl_range =
2187 screen &&
2188 screen->get_shader_param(screen, processor,
2189 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE) != 0;
2190 ureg->next_shader_processor = -1;
2191
2192 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
2193 ureg->properties[i] = ~0;
2194
2195 ureg->free_temps = util_bitmask_create();
2196 if (ureg->free_temps == NULL)
2197 goto no_free_temps;
2198
2199 ureg->local_temps = util_bitmask_create();
2200 if (ureg->local_temps == NULL)
2201 goto no_local_temps;
2202
2203 ureg->decl_temps = util_bitmask_create();
2204 if (ureg->decl_temps == NULL)
2205 goto no_decl_temps;
2206
2207 return ureg;
2208
2209 no_decl_temps:
2210 util_bitmask_destroy(ureg->local_temps);
2211 no_local_temps:
2212 util_bitmask_destroy(ureg->free_temps);
2213 no_free_temps:
2214 FREE(ureg);
2215 no_ureg:
2216 return NULL;
2217 }
2218
2219
2220 void
2221 ureg_set_next_shader_processor(struct ureg_program *ureg, unsigned processor)
2222 {
2223 ureg->next_shader_processor = processor;
2224 }
2225
2226
2227 unsigned
2228 ureg_get_nr_outputs( const struct ureg_program *ureg )
2229 {
2230 if (!ureg)
2231 return 0;
2232 return ureg->nr_outputs;
2233 }
2234
2235
2236 void ureg_destroy( struct ureg_program *ureg )
2237 {
2238 unsigned i;
2239
2240 for (i = 0; i < ARRAY_SIZE(ureg->domain); i++) {
2241 if (ureg->domain[i].tokens &&
2242 ureg->domain[i].tokens != error_tokens)
2243 FREE(ureg->domain[i].tokens);
2244 }
2245
2246 util_bitmask_destroy(ureg->free_temps);
2247 util_bitmask_destroy(ureg->local_temps);
2248 util_bitmask_destroy(ureg->decl_temps);
2249
2250 FREE(ureg);
2251 }