1 /**************************************************************************
3 * Copyright 2009-2010 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_from_mesa.h"
35 #include "tgsi/tgsi_info.h"
36 #include "tgsi/tgsi_dump.h"
37 #include "tgsi/tgsi_sanity.h"
38 #include "util/u_debug.h"
39 #include "util/u_inlines.h"
40 #include "util/u_memory.h"
41 #include "util/u_math.h"
42 #include "util/u_bitmask.h"
44 #include "compiler/shader_info.h"
46 union tgsi_any_token
{
47 struct tgsi_header header
;
48 struct tgsi_processor processor
;
49 struct tgsi_token token
;
50 struct tgsi_property prop
;
51 struct tgsi_property_data prop_data
;
52 struct tgsi_declaration decl
;
53 struct tgsi_declaration_range decl_range
;
54 struct tgsi_declaration_dimension decl_dim
;
55 struct tgsi_declaration_interp decl_interp
;
56 struct tgsi_declaration_image decl_image
;
57 struct tgsi_declaration_semantic decl_semantic
;
58 struct tgsi_declaration_sampler_view decl_sampler_view
;
59 struct tgsi_declaration_array array
;
60 struct tgsi_immediate imm
;
61 union tgsi_immediate_data imm_data
;
62 struct tgsi_instruction insn
;
63 struct tgsi_instruction_label insn_label
;
64 struct tgsi_instruction_texture insn_texture
;
65 struct tgsi_instruction_memory insn_memory
;
66 struct tgsi_texture_offset insn_texture_offset
;
67 struct tgsi_src_register src
;
68 struct tgsi_ind_register ind
;
69 struct tgsi_dimension dim
;
70 struct tgsi_dst_register dst
;
76 union tgsi_any_token
*tokens
;
82 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
83 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
84 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
85 #define UREG_MAX_CONSTANT_RANGE 32
86 #define UREG_MAX_HW_ATOMIC_RANGE 32
87 #define UREG_MAX_IMMEDIATE 4096
88 #define UREG_MAX_ADDR 3
89 #define UREG_MAX_ARRAY_TEMPS 256
95 } constant_range
[UREG_MAX_CONSTANT_RANGE
];
96 unsigned nr_constant_ranges
;
99 struct hw_atomic_decl
{
104 } hw_atomic_range
[UREG_MAX_HW_ATOMIC_RANGE
];
105 unsigned nr_hw_atomic_ranges
;
108 #define DOMAIN_DECL 0
109 #define DOMAIN_INSN 1
113 enum pipe_shader_type processor
;
114 bool supports_any_inout_decl_range
;
115 int next_shader_processor
;
118 enum tgsi_semantic semantic_name
;
119 unsigned semantic_index
;
120 enum tgsi_interpolate_mode interp
;
121 unsigned char cylindrical_wrap
;
122 unsigned char usage_mask
;
123 enum tgsi_interpolate_loc interp_location
;
127 } input
[UREG_MAX_INPUT
];
128 unsigned nr_inputs
, nr_input_regs
;
130 unsigned vs_inputs
[PIPE_MAX_ATTRIBS
/32];
133 enum tgsi_semantic semantic_name
;
134 unsigned semantic_index
;
135 } system_value
[UREG_MAX_SYSTEM_VALUE
];
136 unsigned nr_system_values
;
139 enum tgsi_semantic semantic_name
;
140 unsigned semantic_index
;
142 unsigned usage_mask
; /* = TGSI_WRITEMASK_* */
147 } output
[UREG_MAX_OUTPUT
];
148 unsigned nr_outputs
, nr_output_regs
;
158 } immediate
[UREG_MAX_IMMEDIATE
];
159 unsigned nr_immediates
;
161 struct ureg_src sampler
[PIPE_MAX_SAMPLERS
];
162 unsigned nr_samplers
;
166 enum tgsi_texture_type target
;
167 enum tgsi_return_type return_type_x
;
168 enum tgsi_return_type return_type_y
;
169 enum tgsi_return_type return_type_z
;
170 enum tgsi_return_type return_type_w
;
171 } sampler_view
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
172 unsigned nr_sampler_views
;
176 enum tgsi_texture_type target
;
177 enum pipe_format format
;
180 } image
[PIPE_MAX_SHADER_IMAGES
];
186 } buffer
[PIPE_MAX_SHADER_BUFFERS
];
189 struct util_bitmask
*free_temps
;
190 struct util_bitmask
*local_temps
;
191 struct util_bitmask
*decl_temps
;
194 unsigned array_temps
[UREG_MAX_ARRAY_TEMPS
];
195 unsigned nr_array_temps
;
197 struct const_decl const_decls
[PIPE_MAX_CONSTANT_BUFFERS
];
199 struct hw_atomic_decl hw_atomic_decls
[PIPE_MAX_HW_ATOMIC_BUFFERS
];
201 unsigned properties
[TGSI_PROPERTY_COUNT
];
204 unsigned nr_instructions
;
206 struct ureg_tokens domain
[2];
208 bool use_memory
[TGSI_MEMORY_TYPE_COUNT
];
211 static union tgsi_any_token error_tokens
[32];
213 static void tokens_error( struct ureg_tokens
*tokens
)
215 if (tokens
->tokens
&& tokens
->tokens
!= error_tokens
)
216 FREE(tokens
->tokens
);
218 tokens
->tokens
= error_tokens
;
219 tokens
->size
= ARRAY_SIZE(error_tokens
);
224 static void tokens_expand( struct ureg_tokens
*tokens
,
227 unsigned old_size
= tokens
->size
* sizeof(unsigned);
229 if (tokens
->tokens
== error_tokens
) {
233 while (tokens
->count
+ count
> tokens
->size
) {
234 tokens
->size
= (1 << ++tokens
->order
);
237 tokens
->tokens
= REALLOC(tokens
->tokens
,
239 tokens
->size
* sizeof(unsigned));
240 if (tokens
->tokens
== NULL
) {
241 tokens_error(tokens
);
245 static void set_bad( struct ureg_program
*ureg
)
247 tokens_error(&ureg
->domain
[0]);
252 static union tgsi_any_token
*get_tokens( struct ureg_program
*ureg
,
256 struct ureg_tokens
*tokens
= &ureg
->domain
[domain
];
257 union tgsi_any_token
*result
;
259 if (tokens
->count
+ count
> tokens
->size
)
260 tokens_expand(tokens
, count
);
262 result
= &tokens
->tokens
[tokens
->count
];
263 tokens
->count
+= count
;
268 static union tgsi_any_token
*retrieve_token( struct ureg_program
*ureg
,
272 if (ureg
->domain
[domain
].tokens
== error_tokens
)
273 return &error_tokens
[0];
275 return &ureg
->domain
[domain
].tokens
[nr
];
280 ureg_property(struct ureg_program
*ureg
, unsigned name
, unsigned value
)
282 assert(name
< ARRAY_SIZE(ureg
->properties
));
283 ureg
->properties
[name
] = value
;
287 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program
*ureg
,
288 enum tgsi_semantic semantic_name
,
289 unsigned semantic_index
,
290 enum tgsi_interpolate_mode interp_mode
,
291 unsigned cylindrical_wrap
,
292 enum tgsi_interpolate_loc interp_location
,
300 assert(usage_mask
!= 0);
301 assert(usage_mask
<= TGSI_WRITEMASK_XYZW
);
303 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
304 if (ureg
->input
[i
].semantic_name
== semantic_name
&&
305 ureg
->input
[i
].semantic_index
== semantic_index
) {
306 assert(ureg
->input
[i
].interp
== interp_mode
);
307 assert(ureg
->input
[i
].cylindrical_wrap
== cylindrical_wrap
);
308 assert(ureg
->input
[i
].interp_location
== interp_location
);
309 if (ureg
->input
[i
].array_id
== array_id
) {
310 ureg
->input
[i
].usage_mask
|= usage_mask
;
313 assert((ureg
->input
[i
].usage_mask
& usage_mask
) == 0);
317 if (ureg
->nr_inputs
< UREG_MAX_INPUT
) {
318 assert(array_size
>= 1);
319 ureg
->input
[i
].semantic_name
= semantic_name
;
320 ureg
->input
[i
].semantic_index
= semantic_index
;
321 ureg
->input
[i
].interp
= interp_mode
;
322 ureg
->input
[i
].cylindrical_wrap
= cylindrical_wrap
;
323 ureg
->input
[i
].interp_location
= interp_location
;
324 ureg
->input
[i
].first
= index
;
325 ureg
->input
[i
].last
= index
+ array_size
- 1;
326 ureg
->input
[i
].array_id
= array_id
;
327 ureg
->input
[i
].usage_mask
= usage_mask
;
328 ureg
->nr_input_regs
= MAX2(ureg
->nr_input_regs
, index
+ array_size
);
335 return ureg_src_array_register(TGSI_FILE_INPUT
, ureg
->input
[i
].first
,
340 ureg_DECL_fs_input_cyl_centroid(struct ureg_program
*ureg
,
341 enum tgsi_semantic semantic_name
,
342 unsigned semantic_index
,
343 enum tgsi_interpolate_mode interp_mode
,
344 unsigned cylindrical_wrap
,
345 enum tgsi_interpolate_loc interp_location
,
349 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
350 semantic_name
, semantic_index
, interp_mode
,
351 cylindrical_wrap
, interp_location
,
352 ureg
->nr_input_regs
, TGSI_WRITEMASK_XYZW
, array_id
, array_size
);
357 ureg_DECL_vs_input( struct ureg_program
*ureg
,
360 assert(ureg
->processor
== PIPE_SHADER_VERTEX
);
361 assert(index
/ 32 < ARRAY_SIZE(ureg
->vs_inputs
));
363 ureg
->vs_inputs
[index
/32] |= 1 << (index
% 32);
364 return ureg_src_register( TGSI_FILE_INPUT
, index
);
369 ureg_DECL_input_layout(struct ureg_program
*ureg
,
370 enum tgsi_semantic semantic_name
,
371 unsigned semantic_index
,
377 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
378 semantic_name
, semantic_index
,
379 TGSI_INTERPOLATE_CONSTANT
, 0, TGSI_INTERPOLATE_LOC_CENTER
,
380 index
, usage_mask
, array_id
, array_size
);
385 ureg_DECL_input(struct ureg_program
*ureg
,
386 enum tgsi_semantic semantic_name
,
387 unsigned semantic_index
,
391 return ureg_DECL_fs_input_cyl_centroid(ureg
, semantic_name
, semantic_index
,
392 TGSI_INTERPOLATE_CONSTANT
, 0,
393 TGSI_INTERPOLATE_LOC_CENTER
,
394 array_id
, array_size
);
399 ureg_DECL_system_value(struct ureg_program
*ureg
,
400 enum tgsi_semantic semantic_name
,
401 unsigned semantic_index
)
405 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
406 if (ureg
->system_value
[i
].semantic_name
== semantic_name
&&
407 ureg
->system_value
[i
].semantic_index
== semantic_index
) {
412 if (ureg
->nr_system_values
< UREG_MAX_SYSTEM_VALUE
) {
413 ureg
->system_value
[ureg
->nr_system_values
].semantic_name
= semantic_name
;
414 ureg
->system_value
[ureg
->nr_system_values
].semantic_index
= semantic_index
;
415 i
= ureg
->nr_system_values
;
416 ureg
->nr_system_values
++;
422 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE
, i
);
427 ureg_DECL_output_layout(struct ureg_program
*ureg
,
428 enum tgsi_semantic semantic_name
,
429 unsigned semantic_index
,
439 assert(usage_mask
!= 0);
440 assert(!(streams
& 0x03) || (usage_mask
& 1));
441 assert(!(streams
& 0x0c) || (usage_mask
& 2));
442 assert(!(streams
& 0x30) || (usage_mask
& 4));
443 assert(!(streams
& 0xc0) || (usage_mask
& 8));
445 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
446 if (ureg
->output
[i
].semantic_name
== semantic_name
&&
447 ureg
->output
[i
].semantic_index
== semantic_index
) {
448 if (ureg
->output
[i
].array_id
== array_id
) {
449 ureg
->output
[i
].usage_mask
|= usage_mask
;
452 assert((ureg
->output
[i
].usage_mask
& usage_mask
) == 0);
456 if (ureg
->nr_outputs
< UREG_MAX_OUTPUT
) {
457 ureg
->output
[i
].semantic_name
= semantic_name
;
458 ureg
->output
[i
].semantic_index
= semantic_index
;
459 ureg
->output
[i
].usage_mask
= usage_mask
;
460 ureg
->output
[i
].first
= index
;
461 ureg
->output
[i
].last
= index
+ array_size
- 1;
462 ureg
->output
[i
].array_id
= array_id
;
463 ureg
->output
[i
].invariant
= invariant
;
464 ureg
->nr_output_regs
= MAX2(ureg
->nr_output_regs
, index
+ array_size
);
473 ureg
->output
[i
].streams
|= streams
;
475 return ureg_dst_array_register(TGSI_FILE_OUTPUT
, ureg
->output
[i
].first
,
481 ureg_DECL_output_masked(struct ureg_program
*ureg
,
488 return ureg_DECL_output_layout(ureg
, name
, index
, 0,
489 ureg
->nr_output_regs
, usage_mask
, array_id
,
495 ureg_DECL_output(struct ureg_program
*ureg
,
496 enum tgsi_semantic name
,
499 return ureg_DECL_output_masked(ureg
, name
, index
, TGSI_WRITEMASK_XYZW
,
504 ureg_DECL_output_array(struct ureg_program
*ureg
,
505 enum tgsi_semantic semantic_name
,
506 unsigned semantic_index
,
510 return ureg_DECL_output_masked(ureg
, semantic_name
, semantic_index
,
512 array_id
, array_size
);
516 /* Returns a new constant register. Keep track of which have been
517 * referred to so that we can emit decls later.
519 * Constant operands declared with this function must be addressed
520 * with a two-dimensional index.
522 * There is nothing in this code to bind this constant to any tracked
523 * value or manage any constant_buffer contents -- that's the
524 * resposibility of the calling code.
527 ureg_DECL_constant2D(struct ureg_program
*ureg
,
532 struct const_decl
*decl
= &ureg
->const_decls
[index2D
];
534 assert(index2D
< PIPE_MAX_CONSTANT_BUFFERS
);
536 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
537 uint i
= decl
->nr_constant_ranges
++;
539 decl
->constant_range
[i
].first
= first
;
540 decl
->constant_range
[i
].last
= last
;
545 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
547 * Constant operands declared with this function must be addressed
548 * with a one-dimensional index.
551 ureg_DECL_constant(struct ureg_program
*ureg
,
554 struct const_decl
*decl
= &ureg
->const_decls
[0];
555 unsigned minconst
= index
, maxconst
= index
;
558 /* Inside existing range?
560 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
561 if (decl
->constant_range
[i
].first
<= index
&&
562 decl
->constant_range
[i
].last
>= index
) {
567 /* Extend existing range?
569 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
570 if (decl
->constant_range
[i
].last
== index
- 1) {
571 decl
->constant_range
[i
].last
= index
;
575 if (decl
->constant_range
[i
].first
== index
+ 1) {
576 decl
->constant_range
[i
].first
= index
;
580 minconst
= MIN2(minconst
, decl
->constant_range
[i
].first
);
581 maxconst
= MAX2(maxconst
, decl
->constant_range
[i
].last
);
586 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
587 i
= decl
->nr_constant_ranges
++;
588 decl
->constant_range
[i
].first
= index
;
589 decl
->constant_range
[i
].last
= index
;
593 /* Collapse all ranges down to one:
596 decl
->constant_range
[0].first
= minconst
;
597 decl
->constant_range
[0].last
= maxconst
;
598 decl
->nr_constant_ranges
= 1;
601 assert(i
< decl
->nr_constant_ranges
);
602 assert(decl
->constant_range
[i
].first
<= index
);
603 assert(decl
->constant_range
[i
].last
>= index
);
605 struct ureg_src src
= ureg_src_register(TGSI_FILE_CONSTANT
, index
);
606 return ureg_src_dimension(src
, 0);
610 /* Returns a new hw atomic register. Keep track of which have been
611 * referred to so that we can emit decls later.
614 ureg_DECL_hw_atomic(struct ureg_program
*ureg
,
620 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[buffer_id
];
622 if (decl
->nr_hw_atomic_ranges
< UREG_MAX_HW_ATOMIC_RANGE
) {
623 uint i
= decl
->nr_hw_atomic_ranges
++;
625 decl
->hw_atomic_range
[i
].first
= first
;
626 decl
->hw_atomic_range
[i
].last
= last
;
627 decl
->hw_atomic_range
[i
].array_id
= array_id
;
633 static struct ureg_dst
alloc_temporary( struct ureg_program
*ureg
,
638 /* Look for a released temporary.
640 for (i
= util_bitmask_get_first_index(ureg
->free_temps
);
641 i
!= UTIL_BITMASK_INVALID_INDEX
;
642 i
= util_bitmask_get_next_index(ureg
->free_temps
, i
+ 1)) {
643 if (util_bitmask_get(ureg
->local_temps
, i
) == local
)
647 /* Or allocate a new one.
649 if (i
== UTIL_BITMASK_INVALID_INDEX
) {
650 i
= ureg
->nr_temps
++;
653 util_bitmask_set(ureg
->local_temps
, i
);
655 /* Start a new declaration when the local flag changes */
656 if (!i
|| util_bitmask_get(ureg
->local_temps
, i
- 1) != local
)
657 util_bitmask_set(ureg
->decl_temps
, i
);
660 util_bitmask_clear(ureg
->free_temps
, i
);
662 return ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
665 struct ureg_dst
ureg_DECL_temporary( struct ureg_program
*ureg
)
667 return alloc_temporary(ureg
, FALSE
);
670 struct ureg_dst
ureg_DECL_local_temporary( struct ureg_program
*ureg
)
672 return alloc_temporary(ureg
, TRUE
);
675 struct ureg_dst
ureg_DECL_array_temporary( struct ureg_program
*ureg
,
679 unsigned i
= ureg
->nr_temps
;
680 struct ureg_dst dst
= ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
683 util_bitmask_set(ureg
->local_temps
, i
);
685 /* Always start a new declaration at the start */
686 util_bitmask_set(ureg
->decl_temps
, i
);
688 ureg
->nr_temps
+= size
;
690 /* and also at the end of the array */
691 util_bitmask_set(ureg
->decl_temps
, ureg
->nr_temps
);
693 if (ureg
->nr_array_temps
< UREG_MAX_ARRAY_TEMPS
) {
694 ureg
->array_temps
[ureg
->nr_array_temps
++] = i
;
695 dst
.ArrayID
= ureg
->nr_array_temps
;
701 void ureg_release_temporary( struct ureg_program
*ureg
,
702 struct ureg_dst tmp
)
704 if(tmp
.File
== TGSI_FILE_TEMPORARY
)
705 util_bitmask_set(ureg
->free_temps
, tmp
.Index
);
709 /* Allocate a new address register.
711 struct ureg_dst
ureg_DECL_address( struct ureg_program
*ureg
)
713 if (ureg
->nr_addrs
< UREG_MAX_ADDR
)
714 return ureg_dst_register( TGSI_FILE_ADDRESS
, ureg
->nr_addrs
++ );
717 return ureg_dst_register( TGSI_FILE_ADDRESS
, 0 );
720 /* Allocate a new sampler.
722 struct ureg_src
ureg_DECL_sampler( struct ureg_program
*ureg
,
727 for (i
= 0; i
< ureg
->nr_samplers
; i
++)
728 if (ureg
->sampler
[i
].Index
== (int)nr
)
729 return ureg
->sampler
[i
];
731 if (i
< PIPE_MAX_SAMPLERS
) {
732 ureg
->sampler
[i
] = ureg_src_register( TGSI_FILE_SAMPLER
, nr
);
734 return ureg
->sampler
[i
];
738 return ureg
->sampler
[0];
742 * Allocate a new shader sampler view.
745 ureg_DECL_sampler_view(struct ureg_program
*ureg
,
747 enum tgsi_texture_type target
,
748 enum tgsi_return_type return_type_x
,
749 enum tgsi_return_type return_type_y
,
750 enum tgsi_return_type return_type_z
,
751 enum tgsi_return_type return_type_w
)
753 struct ureg_src reg
= ureg_src_register(TGSI_FILE_SAMPLER_VIEW
, index
);
756 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
757 if (ureg
->sampler_view
[i
].index
== index
) {
762 if (i
< PIPE_MAX_SHADER_SAMPLER_VIEWS
) {
763 ureg
->sampler_view
[i
].index
= index
;
764 ureg
->sampler_view
[i
].target
= target
;
765 ureg
->sampler_view
[i
].return_type_x
= return_type_x
;
766 ureg
->sampler_view
[i
].return_type_y
= return_type_y
;
767 ureg
->sampler_view
[i
].return_type_z
= return_type_z
;
768 ureg
->sampler_view
[i
].return_type_w
= return_type_w
;
769 ureg
->nr_sampler_views
++;
777 /* Allocate a new image.
780 ureg_DECL_image(struct ureg_program
*ureg
,
782 enum tgsi_texture_type target
,
783 enum pipe_format format
,
787 struct ureg_src reg
= ureg_src_register(TGSI_FILE_IMAGE
, index
);
790 for (i
= 0; i
< ureg
->nr_images
; i
++)
791 if (ureg
->image
[i
].index
== index
)
794 if (i
< PIPE_MAX_SHADER_IMAGES
) {
795 ureg
->image
[i
].index
= index
;
796 ureg
->image
[i
].target
= target
;
797 ureg
->image
[i
].wr
= wr
;
798 ureg
->image
[i
].raw
= raw
;
799 ureg
->image
[i
].format
= format
;
808 /* Allocate a new buffer.
810 struct ureg_src
ureg_DECL_buffer(struct ureg_program
*ureg
, unsigned nr
,
813 struct ureg_src reg
= ureg_src_register(TGSI_FILE_BUFFER
, nr
);
816 for (i
= 0; i
< ureg
->nr_buffers
; i
++)
817 if (ureg
->buffer
[i
].index
== nr
)
820 if (i
< PIPE_MAX_SHADER_BUFFERS
) {
821 ureg
->buffer
[i
].index
= nr
;
822 ureg
->buffer
[i
].atomic
= atomic
;
831 /* Allocate a memory area.
833 struct ureg_src
ureg_DECL_memory(struct ureg_program
*ureg
,
834 unsigned memory_type
)
836 struct ureg_src reg
= ureg_src_register(TGSI_FILE_MEMORY
, memory_type
);
838 ureg
->use_memory
[memory_type
] = true;
843 match_or_expand_immediate64( const unsigned *v
,
849 unsigned nr2
= *pnr2
;
853 for (i
= 0; i
< nr
; i
+= 2) {
854 boolean found
= FALSE
;
856 for (j
= 0; j
< nr2
&& !found
; j
+= 2) {
857 if (v
[i
] == v2
[j
] && v
[i
+ 1] == v2
[j
+ 1]) {
858 *swizzle
|= (j
<< (i
* 2)) | ((j
+ 1) << ((i
+ 1) * 2));
868 v2
[nr2
+ 1] = v
[i
+ 1];
870 *swizzle
|= (nr2
<< (i
* 2)) | ((nr2
+ 1) << ((i
+ 1) * 2));
875 /* Actually expand immediate only when fully succeeded.
882 match_or_expand_immediate( const unsigned *v
,
889 unsigned nr2
= *pnr2
;
892 if (type
== TGSI_IMM_FLOAT64
||
893 type
== TGSI_IMM_UINT64
||
894 type
== TGSI_IMM_INT64
)
895 return match_or_expand_immediate64(v
, nr
, v2
, pnr2
, swizzle
);
899 for (i
= 0; i
< nr
; i
++) {
900 boolean found
= FALSE
;
902 for (j
= 0; j
< nr2
&& !found
; j
++) {
904 *swizzle
|= j
<< (i
* 2);
915 *swizzle
|= nr2
<< (i
* 2);
920 /* Actually expand immediate only when fully succeeded.
927 static struct ureg_src
928 decl_immediate( struct ureg_program
*ureg
,
934 unsigned swizzle
= 0;
936 /* Could do a first pass where we examine all existing immediates
940 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
941 if (ureg
->immediate
[i
].type
!= type
) {
944 if (match_or_expand_immediate(v
,
947 ureg
->immediate
[i
].value
.u
,
948 &ureg
->immediate
[i
].nr
,
954 if (ureg
->nr_immediates
< UREG_MAX_IMMEDIATE
) {
955 i
= ureg
->nr_immediates
++;
956 ureg
->immediate
[i
].type
= type
;
957 if (match_or_expand_immediate(v
,
960 ureg
->immediate
[i
].value
.u
,
961 &ureg
->immediate
[i
].nr
,
970 /* Make sure that all referenced elements are from this immediate.
971 * Has the effect of making size-one immediates into scalars.
973 if (type
== TGSI_IMM_FLOAT64
||
974 type
== TGSI_IMM_UINT64
||
975 type
== TGSI_IMM_INT64
) {
976 for (j
= nr
; j
< 4; j
+=2) {
977 swizzle
|= (swizzle
& 0xf) << (j
* 2);
980 for (j
= nr
; j
< 4; j
++) {
981 swizzle
|= (swizzle
& 0x3) << (j
* 2);
984 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE
, i
),
985 (swizzle
>> 0) & 0x3,
986 (swizzle
>> 2) & 0x3,
987 (swizzle
>> 4) & 0x3,
988 (swizzle
>> 6) & 0x3);
993 ureg_DECL_immediate( struct ureg_program
*ureg
,
1003 for (i
= 0; i
< nr
; i
++) {
1007 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT32
);
1011 ureg_DECL_immediate_f64( struct ureg_program
*ureg
,
1021 assert((nr
/ 2) < 3);
1022 for (i
= 0; i
< nr
/ 2; i
++) {
1026 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT64
);
1030 ureg_DECL_immediate_uint( struct ureg_program
*ureg
,
1034 return decl_immediate(ureg
, v
, nr
, TGSI_IMM_UINT32
);
1039 ureg_DECL_immediate_block_uint( struct ureg_program
*ureg
,
1046 if (ureg
->nr_immediates
+ (nr
+ 3) / 4 > UREG_MAX_IMMEDIATE
) {
1048 return ureg_src_register(TGSI_FILE_IMMEDIATE
, 0);
1051 index
= ureg
->nr_immediates
;
1052 ureg
->nr_immediates
+= (nr
+ 3) / 4;
1054 for (i
= index
; i
< ureg
->nr_immediates
; i
++) {
1055 ureg
->immediate
[i
].type
= TGSI_IMM_UINT32
;
1056 ureg
->immediate
[i
].nr
= nr
> 4 ? 4 : nr
;
1057 memcpy(ureg
->immediate
[i
].value
.u
,
1058 &v
[(i
- index
) * 4],
1059 ureg
->immediate
[i
].nr
* sizeof(uint
));
1063 return ureg_src_register(TGSI_FILE_IMMEDIATE
, index
);
1068 ureg_DECL_immediate_int( struct ureg_program
*ureg
,
1072 return decl_immediate(ureg
, (const unsigned *)v
, nr
, TGSI_IMM_INT32
);
1076 ureg_DECL_immediate_uint64( struct ureg_program
*ureg
,
1086 assert((nr
/ 2) < 3);
1087 for (i
= 0; i
< nr
/ 2; i
++) {
1091 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_UINT64
);
1095 ureg_DECL_immediate_int64( struct ureg_program
*ureg
,
1105 assert((nr
/ 2) < 3);
1106 for (i
= 0; i
< nr
/ 2; i
++) {
1110 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_INT64
);
1114 ureg_emit_src( struct ureg_program
*ureg
,
1115 struct ureg_src src
)
1117 unsigned size
= 1 + (src
.Indirect
? 1 : 0) +
1118 (src
.Dimension
? (src
.DimIndirect
? 2 : 1) : 0);
1120 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1123 assert(src
.File
!= TGSI_FILE_NULL
);
1124 assert(src
.File
< TGSI_FILE_COUNT
);
1127 out
[n
].src
.File
= src
.File
;
1128 out
[n
].src
.SwizzleX
= src
.SwizzleX
;
1129 out
[n
].src
.SwizzleY
= src
.SwizzleY
;
1130 out
[n
].src
.SwizzleZ
= src
.SwizzleZ
;
1131 out
[n
].src
.SwizzleW
= src
.SwizzleW
;
1132 out
[n
].src
.Index
= src
.Index
;
1133 out
[n
].src
.Negate
= src
.Negate
;
1134 out
[0].src
.Absolute
= src
.Absolute
;
1138 out
[0].src
.Indirect
= 1;
1140 out
[n
].ind
.File
= src
.IndirectFile
;
1141 out
[n
].ind
.Swizzle
= src
.IndirectSwizzle
;
1142 out
[n
].ind
.Index
= src
.IndirectIndex
;
1143 if (!ureg
->supports_any_inout_decl_range
&&
1144 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1145 out
[n
].ind
.ArrayID
= 0;
1147 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1151 if (src
.Dimension
) {
1152 out
[0].src
.Dimension
= 1;
1153 out
[n
].dim
.Dimension
= 0;
1154 out
[n
].dim
.Padding
= 0;
1155 if (src
.DimIndirect
) {
1156 out
[n
].dim
.Indirect
= 1;
1157 out
[n
].dim
.Index
= src
.DimensionIndex
;
1160 out
[n
].ind
.File
= src
.DimIndFile
;
1161 out
[n
].ind
.Swizzle
= src
.DimIndSwizzle
;
1162 out
[n
].ind
.Index
= src
.DimIndIndex
;
1163 if (!ureg
->supports_any_inout_decl_range
&&
1164 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1165 out
[n
].ind
.ArrayID
= 0;
1167 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1169 out
[n
].dim
.Indirect
= 0;
1170 out
[n
].dim
.Index
= src
.DimensionIndex
;
1180 ureg_emit_dst( struct ureg_program
*ureg
,
1181 struct ureg_dst dst
)
1183 unsigned size
= 1 + (dst
.Indirect
? 1 : 0) +
1184 (dst
.Dimension
? (dst
.DimIndirect
? 2 : 1) : 0);
1186 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1189 assert(dst
.File
!= TGSI_FILE_NULL
);
1190 assert(dst
.File
!= TGSI_FILE_SAMPLER
);
1191 assert(dst
.File
!= TGSI_FILE_SAMPLER_VIEW
);
1192 assert(dst
.File
!= TGSI_FILE_IMMEDIATE
);
1193 assert(dst
.File
< TGSI_FILE_COUNT
);
1196 out
[n
].dst
.File
= dst
.File
;
1197 out
[n
].dst
.WriteMask
= dst
.WriteMask
;
1198 out
[n
].dst
.Indirect
= dst
.Indirect
;
1199 out
[n
].dst
.Index
= dst
.Index
;
1204 out
[n
].ind
.File
= dst
.IndirectFile
;
1205 out
[n
].ind
.Swizzle
= dst
.IndirectSwizzle
;
1206 out
[n
].ind
.Index
= dst
.IndirectIndex
;
1207 if (!ureg
->supports_any_inout_decl_range
&&
1208 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1209 out
[n
].ind
.ArrayID
= 0;
1211 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1215 if (dst
.Dimension
) {
1216 out
[0].dst
.Dimension
= 1;
1217 out
[n
].dim
.Dimension
= 0;
1218 out
[n
].dim
.Padding
= 0;
1219 if (dst
.DimIndirect
) {
1220 out
[n
].dim
.Indirect
= 1;
1221 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1224 out
[n
].ind
.File
= dst
.DimIndFile
;
1225 out
[n
].ind
.Swizzle
= dst
.DimIndSwizzle
;
1226 out
[n
].ind
.Index
= dst
.DimIndIndex
;
1227 if (!ureg
->supports_any_inout_decl_range
&&
1228 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1229 out
[n
].ind
.ArrayID
= 0;
1231 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1233 out
[n
].dim
.Indirect
= 0;
1234 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1243 static void validate( enum tgsi_opcode opcode
,
1248 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info( opcode
);
1251 assert(nr_dst
== info
->num_dst
);
1252 assert(nr_src
== info
->num_src
);
1257 struct ureg_emit_insn_result
1258 ureg_emit_insn(struct ureg_program
*ureg
,
1259 enum tgsi_opcode opcode
,
1265 union tgsi_any_token
*out
;
1267 struct ureg_emit_insn_result result
;
1269 validate( opcode
, num_dst
, num_src
);
1271 out
= get_tokens( ureg
, DOMAIN_INSN
, count
);
1272 out
[0].insn
= tgsi_default_instruction();
1273 out
[0].insn
.Opcode
= opcode
;
1274 out
[0].insn
.Saturate
= saturate
;
1275 out
[0].insn
.Precise
= precise
;
1276 out
[0].insn
.NumDstRegs
= num_dst
;
1277 out
[0].insn
.NumSrcRegs
= num_src
;
1279 result
.insn_token
= ureg
->domain
[DOMAIN_INSN
].count
- count
;
1280 result
.extended_token
= result
.insn_token
;
1282 ureg
->nr_instructions
++;
1289 * Emit a label token.
1290 * \param label_token returns a token number indicating where the label
1291 * needs to be patched later. Later, this value should be passed to the
1292 * ureg_fixup_label() function.
1295 ureg_emit_label(struct ureg_program
*ureg
,
1296 unsigned extended_token
,
1297 unsigned *label_token
)
1299 union tgsi_any_token
*out
, *insn
;
1304 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1307 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1308 insn
->insn
.Label
= 1;
1310 *label_token
= ureg
->domain
[DOMAIN_INSN
].count
- 1;
1313 /* Will return a number which can be used in a label to point to the
1314 * next instruction to be emitted.
1317 ureg_get_instruction_number( struct ureg_program
*ureg
)
1319 return ureg
->nr_instructions
;
1322 /* Patch a given label (expressed as a token number) to point to a
1323 * given instruction (expressed as an instruction number).
1326 ureg_fixup_label(struct ureg_program
*ureg
,
1327 unsigned label_token
,
1328 unsigned instruction_number
)
1330 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, label_token
);
1332 out
->insn_label
.Label
= instruction_number
;
1337 ureg_emit_texture(struct ureg_program
*ureg
,
1338 unsigned extended_token
,
1339 enum tgsi_texture_type target
,
1340 enum tgsi_return_type return_type
, unsigned num_offsets
)
1342 union tgsi_any_token
*out
, *insn
;
1344 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1345 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1347 insn
->insn
.Texture
= 1;
1350 out
[0].insn_texture
.Texture
= target
;
1351 out
[0].insn_texture
.NumOffsets
= num_offsets
;
1352 out
[0].insn_texture
.ReturnType
= return_type
;
1356 ureg_emit_texture_offset(struct ureg_program
*ureg
,
1357 const struct tgsi_texture_offset
*offset
)
1359 union tgsi_any_token
*out
;
1361 out
= get_tokens( ureg
, DOMAIN_INSN
, 1);
1364 out
[0].insn_texture_offset
= *offset
;
1368 ureg_emit_memory(struct ureg_program
*ureg
,
1369 unsigned extended_token
,
1371 enum tgsi_texture_type texture
,
1372 enum pipe_format format
)
1374 union tgsi_any_token
*out
, *insn
;
1376 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1377 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1379 insn
->insn
.Memory
= 1;
1382 out
[0].insn_memory
.Qualifier
= qualifier
;
1383 out
[0].insn_memory
.Texture
= texture
;
1384 out
[0].insn_memory
.Format
= format
;
1388 ureg_fixup_insn_size(struct ureg_program
*ureg
,
1391 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, insn
);
1393 assert(out
->insn
.Type
== TGSI_TOKEN_TYPE_INSTRUCTION
);
1394 out
->insn
.NrTokens
= ureg
->domain
[DOMAIN_INSN
].count
- insn
- 1;
1399 ureg_insn(struct ureg_program
*ureg
,
1400 enum tgsi_opcode opcode
,
1401 const struct ureg_dst
*dst
,
1403 const struct ureg_src
*src
,
1407 struct ureg_emit_insn_result insn
;
1411 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1415 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1417 insn
= ureg_emit_insn(ureg
,
1424 for (i
= 0; i
< nr_dst
; i
++)
1425 ureg_emit_dst( ureg
, dst
[i
] );
1427 for (i
= 0; i
< nr_src
; i
++)
1428 ureg_emit_src( ureg
, src
[i
] );
1430 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1434 ureg_tex_insn(struct ureg_program
*ureg
,
1435 enum tgsi_opcode opcode
,
1436 const struct ureg_dst
*dst
,
1438 enum tgsi_texture_type target
,
1439 enum tgsi_return_type return_type
,
1440 const struct tgsi_texture_offset
*texoffsets
,
1442 const struct ureg_src
*src
,
1445 struct ureg_emit_insn_result insn
;
1449 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1453 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1455 insn
= ureg_emit_insn(ureg
,
1462 ureg_emit_texture( ureg
, insn
.extended_token
, target
, return_type
,
1465 for (i
= 0; i
< nr_offset
; i
++)
1466 ureg_emit_texture_offset( ureg
, &texoffsets
[i
]);
1468 for (i
= 0; i
< nr_dst
; i
++)
1469 ureg_emit_dst( ureg
, dst
[i
] );
1471 for (i
= 0; i
< nr_src
; i
++)
1472 ureg_emit_src( ureg
, src
[i
] );
1474 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1479 ureg_memory_insn(struct ureg_program
*ureg
,
1480 enum tgsi_opcode opcode
,
1481 const struct ureg_dst
*dst
,
1483 const struct ureg_src
*src
,
1486 enum tgsi_texture_type texture
,
1487 enum pipe_format format
)
1489 struct ureg_emit_insn_result insn
;
1492 insn
= ureg_emit_insn(ureg
,
1499 ureg_emit_memory(ureg
, insn
.extended_token
, qualifier
, texture
, format
);
1501 for (i
= 0; i
< nr_dst
; i
++)
1502 ureg_emit_dst(ureg
, dst
[i
]);
1504 for (i
= 0; i
< nr_src
; i
++)
1505 ureg_emit_src(ureg
, src
[i
]);
1507 ureg_fixup_insn_size(ureg
, insn
.insn_token
);
1512 emit_decl_semantic(struct ureg_program
*ureg
,
1516 enum tgsi_semantic semantic_name
,
1517 unsigned semantic_index
,
1519 unsigned usage_mask
,
1523 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1526 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1527 out
[0].decl
.NrTokens
= 3;
1528 out
[0].decl
.File
= file
;
1529 out
[0].decl
.UsageMask
= usage_mask
;
1530 out
[0].decl
.Semantic
= 1;
1531 out
[0].decl
.Array
= array_id
!= 0;
1532 out
[0].decl
.Invariant
= invariant
;
1535 out
[1].decl_range
.First
= first
;
1536 out
[1].decl_range
.Last
= last
;
1539 out
[2].decl_semantic
.Name
= semantic_name
;
1540 out
[2].decl_semantic
.Index
= semantic_index
;
1541 out
[2].decl_semantic
.StreamX
= streams
& 3;
1542 out
[2].decl_semantic
.StreamY
= (streams
>> 2) & 3;
1543 out
[2].decl_semantic
.StreamZ
= (streams
>> 4) & 3;
1544 out
[2].decl_semantic
.StreamW
= (streams
>> 6) & 3;
1548 out
[3].array
.ArrayID
= array_id
;
1553 emit_decl_atomic_2d(struct ureg_program
*ureg
,
1559 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1562 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1563 out
[0].decl
.NrTokens
= 3;
1564 out
[0].decl
.File
= TGSI_FILE_HW_ATOMIC
;
1565 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1566 out
[0].decl
.Dimension
= 1;
1567 out
[0].decl
.Array
= array_id
!= 0;
1570 out
[1].decl_range
.First
= first
;
1571 out
[1].decl_range
.Last
= last
;
1574 out
[2].decl_dim
.Index2D
= index2D
;
1578 out
[3].array
.ArrayID
= array_id
;
1583 emit_decl_fs(struct ureg_program
*ureg
,
1587 enum tgsi_semantic semantic_name
,
1588 unsigned semantic_index
,
1589 enum tgsi_interpolate_mode interpolate
,
1590 unsigned cylindrical_wrap
,
1591 enum tgsi_interpolate_loc interpolate_location
,
1593 unsigned usage_mask
)
1595 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
,
1599 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1600 out
[0].decl
.NrTokens
= 4;
1601 out
[0].decl
.File
= file
;
1602 out
[0].decl
.UsageMask
= usage_mask
;
1603 out
[0].decl
.Interpolate
= 1;
1604 out
[0].decl
.Semantic
= 1;
1605 out
[0].decl
.Array
= array_id
!= 0;
1608 out
[1].decl_range
.First
= first
;
1609 out
[1].decl_range
.Last
= last
;
1612 out
[2].decl_interp
.Interpolate
= interpolate
;
1613 out
[2].decl_interp
.CylindricalWrap
= cylindrical_wrap
;
1614 out
[2].decl_interp
.Location
= interpolate_location
;
1617 out
[3].decl_semantic
.Name
= semantic_name
;
1618 out
[3].decl_semantic
.Index
= semantic_index
;
1622 out
[4].array
.ArrayID
= array_id
;
1627 emit_decl_temps( struct ureg_program
*ureg
,
1628 unsigned first
, unsigned last
,
1632 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
,
1636 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1637 out
[0].decl
.NrTokens
= 2;
1638 out
[0].decl
.File
= TGSI_FILE_TEMPORARY
;
1639 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1640 out
[0].decl
.Local
= local
;
1643 out
[1].decl_range
.First
= first
;
1644 out
[1].decl_range
.Last
= last
;
1647 out
[0].decl
.Array
= 1;
1649 out
[2].array
.ArrayID
= arrayid
;
1653 static void emit_decl_range( struct ureg_program
*ureg
,
1658 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
1661 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1662 out
[0].decl
.NrTokens
= 2;
1663 out
[0].decl
.File
= file
;
1664 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1665 out
[0].decl
.Semantic
= 0;
1668 out
[1].decl_range
.First
= first
;
1669 out
[1].decl_range
.Last
= first
+ count
- 1;
1673 emit_decl_range2D(struct ureg_program
*ureg
,
1679 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1682 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1683 out
[0].decl
.NrTokens
= 3;
1684 out
[0].decl
.File
= file
;
1685 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1686 out
[0].decl
.Dimension
= 1;
1689 out
[1].decl_range
.First
= first
;
1690 out
[1].decl_range
.Last
= last
;
1693 out
[2].decl_dim
.Index2D
= index2D
;
1697 emit_decl_sampler_view(struct ureg_program
*ureg
,
1699 enum tgsi_texture_type target
,
1700 enum tgsi_return_type return_type_x
,
1701 enum tgsi_return_type return_type_y
,
1702 enum tgsi_return_type return_type_z
,
1703 enum tgsi_return_type return_type_w
)
1705 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1708 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1709 out
[0].decl
.NrTokens
= 3;
1710 out
[0].decl
.File
= TGSI_FILE_SAMPLER_VIEW
;
1711 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1714 out
[1].decl_range
.First
= index
;
1715 out
[1].decl_range
.Last
= index
;
1718 out
[2].decl_sampler_view
.Resource
= target
;
1719 out
[2].decl_sampler_view
.ReturnTypeX
= return_type_x
;
1720 out
[2].decl_sampler_view
.ReturnTypeY
= return_type_y
;
1721 out
[2].decl_sampler_view
.ReturnTypeZ
= return_type_z
;
1722 out
[2].decl_sampler_view
.ReturnTypeW
= return_type_w
;
1726 emit_decl_image(struct ureg_program
*ureg
,
1728 enum tgsi_texture_type target
,
1729 enum pipe_format format
,
1733 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1736 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1737 out
[0].decl
.NrTokens
= 3;
1738 out
[0].decl
.File
= TGSI_FILE_IMAGE
;
1739 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1742 out
[1].decl_range
.First
= index
;
1743 out
[1].decl_range
.Last
= index
;
1746 out
[2].decl_image
.Resource
= target
;
1747 out
[2].decl_image
.Writable
= wr
;
1748 out
[2].decl_image
.Raw
= raw
;
1749 out
[2].decl_image
.Format
= format
;
1753 emit_decl_buffer(struct ureg_program
*ureg
,
1757 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1760 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1761 out
[0].decl
.NrTokens
= 2;
1762 out
[0].decl
.File
= TGSI_FILE_BUFFER
;
1763 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1764 out
[0].decl
.Atomic
= atomic
;
1767 out
[1].decl_range
.First
= index
;
1768 out
[1].decl_range
.Last
= index
;
1772 emit_decl_memory(struct ureg_program
*ureg
, unsigned memory_type
)
1774 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1777 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1778 out
[0].decl
.NrTokens
= 2;
1779 out
[0].decl
.File
= TGSI_FILE_MEMORY
;
1780 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1781 out
[0].decl
.MemType
= memory_type
;
1784 out
[1].decl_range
.First
= memory_type
;
1785 out
[1].decl_range
.Last
= memory_type
;
1789 emit_immediate( struct ureg_program
*ureg
,
1793 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 5 );
1796 out
[0].imm
.Type
= TGSI_TOKEN_TYPE_IMMEDIATE
;
1797 out
[0].imm
.NrTokens
= 5;
1798 out
[0].imm
.DataType
= type
;
1799 out
[0].imm
.Padding
= 0;
1801 out
[1].imm_data
.Uint
= v
[0];
1802 out
[2].imm_data
.Uint
= v
[1];
1803 out
[3].imm_data
.Uint
= v
[2];
1804 out
[4].imm_data
.Uint
= v
[3];
1808 emit_property(struct ureg_program
*ureg
,
1812 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1815 out
[0].prop
.Type
= TGSI_TOKEN_TYPE_PROPERTY
;
1816 out
[0].prop
.NrTokens
= 2;
1817 out
[0].prop
.PropertyName
= name
;
1819 out
[1].prop_data
.Data
= data
;
1823 static void emit_decls( struct ureg_program
*ureg
)
1827 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
1828 if (ureg
->properties
[i
] != ~0u)
1829 emit_property(ureg
, i
, ureg
->properties
[i
]);
1831 if (ureg
->processor
== PIPE_SHADER_VERTEX
) {
1832 for (i
= 0; i
< PIPE_MAX_ATTRIBS
; i
++) {
1833 if (ureg
->vs_inputs
[i
/32] & (1u << (i
%32))) {
1834 emit_decl_range( ureg
, TGSI_FILE_INPUT
, i
, 1 );
1837 } else if (ureg
->processor
== PIPE_SHADER_FRAGMENT
) {
1838 if (ureg
->supports_any_inout_decl_range
) {
1839 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1842 ureg
->input
[i
].first
,
1843 ureg
->input
[i
].last
,
1844 ureg
->input
[i
].semantic_name
,
1845 ureg
->input
[i
].semantic_index
,
1846 ureg
->input
[i
].interp
,
1847 ureg
->input
[i
].cylindrical_wrap
,
1848 ureg
->input
[i
].interp_location
,
1849 ureg
->input
[i
].array_id
,
1850 ureg
->input
[i
].usage_mask
);
1854 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1855 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1859 ureg
->input
[i
].semantic_name
,
1860 ureg
->input
[i
].semantic_index
+
1861 (j
- ureg
->input
[i
].first
),
1862 ureg
->input
[i
].interp
,
1863 ureg
->input
[i
].cylindrical_wrap
,
1864 ureg
->input
[i
].interp_location
, 0,
1865 ureg
->input
[i
].usage_mask
);
1870 if (ureg
->supports_any_inout_decl_range
) {
1871 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1872 emit_decl_semantic(ureg
,
1874 ureg
->input
[i
].first
,
1875 ureg
->input
[i
].last
,
1876 ureg
->input
[i
].semantic_name
,
1877 ureg
->input
[i
].semantic_index
,
1879 TGSI_WRITEMASK_XYZW
,
1880 ureg
->input
[i
].array_id
,
1885 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1886 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1887 emit_decl_semantic(ureg
,
1890 ureg
->input
[i
].semantic_name
,
1891 ureg
->input
[i
].semantic_index
+
1892 (j
- ureg
->input
[i
].first
),
1894 TGSI_WRITEMASK_XYZW
, 0, FALSE
);
1900 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
1901 emit_decl_semantic(ureg
,
1902 TGSI_FILE_SYSTEM_VALUE
,
1905 ureg
->system_value
[i
].semantic_name
,
1906 ureg
->system_value
[i
].semantic_index
,
1908 TGSI_WRITEMASK_XYZW
, 0, FALSE
);
1911 if (ureg
->supports_any_inout_decl_range
) {
1912 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1913 emit_decl_semantic(ureg
,
1915 ureg
->output
[i
].first
,
1916 ureg
->output
[i
].last
,
1917 ureg
->output
[i
].semantic_name
,
1918 ureg
->output
[i
].semantic_index
,
1919 ureg
->output
[i
].streams
,
1920 ureg
->output
[i
].usage_mask
,
1921 ureg
->output
[i
].array_id
,
1922 ureg
->output
[i
].invariant
);
1926 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1927 for (j
= ureg
->output
[i
].first
; j
<= ureg
->output
[i
].last
; j
++) {
1928 emit_decl_semantic(ureg
,
1931 ureg
->output
[i
].semantic_name
,
1932 ureg
->output
[i
].semantic_index
+
1933 (j
- ureg
->output
[i
].first
),
1934 ureg
->output
[i
].streams
,
1935 ureg
->output
[i
].usage_mask
,
1937 ureg
->output
[i
].invariant
);
1942 for (i
= 0; i
< ureg
->nr_samplers
; i
++) {
1943 emit_decl_range( ureg
,
1945 ureg
->sampler
[i
].Index
, 1 );
1948 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
1949 emit_decl_sampler_view(ureg
,
1950 ureg
->sampler_view
[i
].index
,
1951 ureg
->sampler_view
[i
].target
,
1952 ureg
->sampler_view
[i
].return_type_x
,
1953 ureg
->sampler_view
[i
].return_type_y
,
1954 ureg
->sampler_view
[i
].return_type_z
,
1955 ureg
->sampler_view
[i
].return_type_w
);
1958 for (i
= 0; i
< ureg
->nr_images
; i
++) {
1959 emit_decl_image(ureg
,
1960 ureg
->image
[i
].index
,
1961 ureg
->image
[i
].target
,
1962 ureg
->image
[i
].format
,
1964 ureg
->image
[i
].raw
);
1967 for (i
= 0; i
< ureg
->nr_buffers
; i
++) {
1968 emit_decl_buffer(ureg
, ureg
->buffer
[i
].index
, ureg
->buffer
[i
].atomic
);
1971 for (i
= 0; i
< TGSI_MEMORY_TYPE_COUNT
; i
++) {
1972 if (ureg
->use_memory
[i
])
1973 emit_decl_memory(ureg
, i
);
1976 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
1977 struct const_decl
*decl
= &ureg
->const_decls
[i
];
1979 if (decl
->nr_constant_ranges
) {
1982 for (j
= 0; j
< decl
->nr_constant_ranges
; j
++) {
1983 emit_decl_range2D(ureg
,
1985 decl
->constant_range
[j
].first
,
1986 decl
->constant_range
[j
].last
,
1992 for (i
= 0; i
< PIPE_MAX_HW_ATOMIC_BUFFERS
; i
++) {
1993 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[i
];
1995 if (decl
->nr_hw_atomic_ranges
) {
1998 for (j
= 0; j
< decl
->nr_hw_atomic_ranges
; j
++) {
1999 emit_decl_atomic_2d(ureg
,
2000 decl
->hw_atomic_range
[j
].first
,
2001 decl
->hw_atomic_range
[j
].last
,
2003 decl
->hw_atomic_range
[j
].array_id
);
2008 if (ureg
->nr_temps
) {
2010 for (i
= 0; i
< ureg
->nr_temps
;) {
2011 boolean local
= util_bitmask_get(ureg
->local_temps
, i
);
2013 i
= util_bitmask_get_next_index(ureg
->decl_temps
, i
+ 1);
2014 if (i
== UTIL_BITMASK_INVALID_INDEX
)
2017 if (array
< ureg
->nr_array_temps
&& ureg
->array_temps
[array
] == first
)
2018 emit_decl_temps( ureg
, first
, i
- 1, local
, ++array
);
2020 emit_decl_temps( ureg
, first
, i
- 1, local
, 0 );
2024 if (ureg
->nr_addrs
) {
2025 emit_decl_range( ureg
,
2027 0, ureg
->nr_addrs
);
2030 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
2031 emit_immediate( ureg
,
2032 ureg
->immediate
[i
].value
.u
,
2033 ureg
->immediate
[i
].type
);
2037 /* Append the instruction tokens onto the declarations to build a
2038 * contiguous stream suitable to send to the driver.
2040 static void copy_instructions( struct ureg_program
*ureg
)
2042 unsigned nr_tokens
= ureg
->domain
[DOMAIN_INSN
].count
;
2043 union tgsi_any_token
*out
= get_tokens( ureg
,
2048 ureg
->domain
[DOMAIN_INSN
].tokens
,
2049 nr_tokens
* sizeof out
[0] );
2054 fixup_header_size(struct ureg_program
*ureg
)
2056 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_DECL
, 0 );
2058 out
->header
.BodySize
= ureg
->domain
[DOMAIN_DECL
].count
- 2;
2063 emit_header( struct ureg_program
*ureg
)
2065 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
2067 out
[0].header
.HeaderSize
= 2;
2068 out
[0].header
.BodySize
= 0;
2070 out
[1].processor
.Processor
= ureg
->processor
;
2071 out
[1].processor
.Padding
= 0;
2075 const struct tgsi_token
*ureg_finalize( struct ureg_program
*ureg
)
2077 const struct tgsi_token
*tokens
;
2079 switch (ureg
->processor
) {
2080 case PIPE_SHADER_VERTEX
:
2081 case PIPE_SHADER_TESS_EVAL
:
2082 ureg_property(ureg
, TGSI_PROPERTY_NEXT_SHADER
,
2083 ureg
->next_shader_processor
== -1 ?
2084 PIPE_SHADER_FRAGMENT
:
2085 ureg
->next_shader_processor
);
2091 emit_header( ureg
);
2093 copy_instructions( ureg
);
2094 fixup_header_size( ureg
);
2096 if (ureg
->domain
[0].tokens
== error_tokens
||
2097 ureg
->domain
[1].tokens
== error_tokens
) {
2098 debug_printf("%s: error in generated shader\n", __FUNCTION__
);
2103 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2106 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__
,
2107 ureg
->domain
[DOMAIN_DECL
].count
);
2108 tgsi_dump( tokens
, 0 );
2112 /* tgsi_sanity doesn't seem to return if there are too many constants. */
2113 bool too_many_constants
= false;
2114 for (unsigned i
= 0; i
< ARRAY_SIZE(ureg
->const_decls
); i
++) {
2115 for (unsigned j
= 0; j
< ureg
->const_decls
[i
].nr_constant_ranges
; j
++) {
2116 if (ureg
->const_decls
[i
].constant_range
[j
].last
> 4096) {
2117 too_many_constants
= true;
2123 if (tokens
&& !too_many_constants
&& !tgsi_sanity_check(tokens
)) {
2124 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2125 tgsi_dump(tokens
, 0);
2135 void *ureg_create_shader( struct ureg_program
*ureg
,
2136 struct pipe_context
*pipe
,
2137 const struct pipe_stream_output_info
*so
)
2139 struct pipe_shader_state state
= {0};
2141 pipe_shader_state_from_tgsi(&state
, ureg_finalize(ureg
));
2146 state
.stream_output
= *so
;
2148 switch (ureg
->processor
) {
2149 case PIPE_SHADER_VERTEX
:
2150 return pipe
->create_vs_state(pipe
, &state
);
2151 case PIPE_SHADER_TESS_CTRL
:
2152 return pipe
->create_tcs_state(pipe
, &state
);
2153 case PIPE_SHADER_TESS_EVAL
:
2154 return pipe
->create_tes_state(pipe
, &state
);
2155 case PIPE_SHADER_GEOMETRY
:
2156 return pipe
->create_gs_state(pipe
, &state
);
2157 case PIPE_SHADER_FRAGMENT
:
2158 return pipe
->create_fs_state(pipe
, &state
);
2165 const struct tgsi_token
*ureg_get_tokens( struct ureg_program
*ureg
,
2166 unsigned *nr_tokens
)
2168 const struct tgsi_token
*tokens
;
2170 ureg_finalize(ureg
);
2172 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2175 *nr_tokens
= ureg
->domain
[DOMAIN_DECL
].count
;
2177 ureg
->domain
[DOMAIN_DECL
].tokens
= 0;
2178 ureg
->domain
[DOMAIN_DECL
].size
= 0;
2179 ureg
->domain
[DOMAIN_DECL
].order
= 0;
2180 ureg
->domain
[DOMAIN_DECL
].count
= 0;
/* Free a token stream previously returned by ureg_get_tokens(). */
void ureg_free_tokens(const struct tgsi_token *tokens)
{
   FREE((struct tgsi_token *)tokens);
}
2192 struct ureg_program
*
2193 ureg_create(enum pipe_shader_type processor
)
2195 return ureg_create_with_screen(processor
, NULL
);
2199 struct ureg_program
*
2200 ureg_create_with_screen(enum pipe_shader_type processor
,
2201 struct pipe_screen
*screen
)
2204 struct ureg_program
*ureg
= CALLOC_STRUCT( ureg_program
);
2208 ureg
->processor
= processor
;
2209 ureg
->supports_any_inout_decl_range
=
2211 screen
->get_shader_param(screen
, processor
,
2212 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE
) != 0;
2213 ureg
->next_shader_processor
= -1;
2215 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
2216 ureg
->properties
[i
] = ~0;
2218 ureg
->free_temps
= util_bitmask_create();
2219 if (ureg
->free_temps
== NULL
)
2222 ureg
->local_temps
= util_bitmask_create();
2223 if (ureg
->local_temps
== NULL
)
2224 goto no_local_temps
;
2226 ureg
->decl_temps
= util_bitmask_create();
2227 if (ureg
->decl_temps
== NULL
)
2233 util_bitmask_destroy(ureg
->local_temps
);
2235 util_bitmask_destroy(ureg
->free_temps
);
2244 ureg_set_next_shader_processor(struct ureg_program
*ureg
, unsigned processor
)
2246 ureg
->next_shader_processor
= processor
;
2251 ureg_get_nr_outputs( const struct ureg_program
*ureg
)
2255 return ureg
->nr_outputs
;
2259 ureg_setup_clipdist_info(struct ureg_program
*ureg
,
2260 const struct shader_info
*info
)
2262 if (info
->clip_distance_array_size
)
2263 ureg_property(ureg
, TGSI_PROPERTY_NUM_CLIPDIST_ENABLED
,
2264 info
->clip_distance_array_size
);
2265 if (info
->cull_distance_array_size
)
2266 ureg_property(ureg
, TGSI_PROPERTY_NUM_CULLDIST_ENABLED
,
2267 info
->cull_distance_array_size
);
2271 ureg_setup_tess_ctrl_shader(struct ureg_program
*ureg
,
2272 const struct shader_info
*info
)
2274 ureg_property(ureg
, TGSI_PROPERTY_TCS_VERTICES_OUT
,
2275 info
->tess
.tcs_vertices_out
);
2279 ureg_setup_tess_eval_shader(struct ureg_program
*ureg
,
2280 const struct shader_info
*info
)
2282 if (info
->tess
.primitive_mode
== GL_ISOLINES
)
2283 ureg_property(ureg
, TGSI_PROPERTY_TES_PRIM_MODE
, GL_LINES
);
2285 ureg_property(ureg
, TGSI_PROPERTY_TES_PRIM_MODE
,
2286 info
->tess
.primitive_mode
);
2288 STATIC_ASSERT((TESS_SPACING_EQUAL
+ 1) % 3 == PIPE_TESS_SPACING_EQUAL
);
2289 STATIC_ASSERT((TESS_SPACING_FRACTIONAL_ODD
+ 1) % 3 ==
2290 PIPE_TESS_SPACING_FRACTIONAL_ODD
);
2291 STATIC_ASSERT((TESS_SPACING_FRACTIONAL_EVEN
+ 1) % 3 ==
2292 PIPE_TESS_SPACING_FRACTIONAL_EVEN
);
2294 ureg_property(ureg
, TGSI_PROPERTY_TES_SPACING
,
2295 (info
->tess
.spacing
+ 1) % 3);
2297 ureg_property(ureg
, TGSI_PROPERTY_TES_VERTEX_ORDER_CW
,
2299 ureg_property(ureg
, TGSI_PROPERTY_TES_POINT_MODE
,
2300 info
->tess
.point_mode
);
2304 ureg_setup_geometry_shader(struct ureg_program
*ureg
,
2305 const struct shader_info
*info
)
2307 ureg_property(ureg
, TGSI_PROPERTY_GS_INPUT_PRIM
,
2308 info
->gs
.input_primitive
);
2309 ureg_property(ureg
, TGSI_PROPERTY_GS_OUTPUT_PRIM
,
2310 info
->gs
.output_primitive
);
2311 ureg_property(ureg
, TGSI_PROPERTY_GS_MAX_OUTPUT_VERTICES
,
2312 info
->gs
.vertices_out
);
2313 ureg_property(ureg
, TGSI_PROPERTY_GS_INVOCATIONS
,
2314 info
->gs
.invocations
);
2318 ureg_setup_fragment_shader(struct ureg_program
*ureg
,
2319 const struct shader_info
*info
)
2321 if (info
->fs
.early_fragment_tests
|| info
->fs
.post_depth_coverage
) {
2322 ureg_property(ureg
, TGSI_PROPERTY_FS_EARLY_DEPTH_STENCIL
, 1);
2324 if (info
->fs
.post_depth_coverage
)
2325 ureg_property(ureg
, TGSI_PROPERTY_FS_POST_DEPTH_COVERAGE
, 1);
2328 if (info
->fs
.depth_layout
!= FRAG_DEPTH_LAYOUT_NONE
) {
2329 switch (info
->fs
.depth_layout
) {
2330 case FRAG_DEPTH_LAYOUT_ANY
:
2331 ureg_property(ureg
, TGSI_PROPERTY_FS_DEPTH_LAYOUT
,
2332 TGSI_FS_DEPTH_LAYOUT_ANY
);
2334 case FRAG_DEPTH_LAYOUT_GREATER
:
2335 ureg_property(ureg
, TGSI_PROPERTY_FS_DEPTH_LAYOUT
,
2336 TGSI_FS_DEPTH_LAYOUT_GREATER
);
2338 case FRAG_DEPTH_LAYOUT_LESS
:
2339 ureg_property(ureg
, TGSI_PROPERTY_FS_DEPTH_LAYOUT
,
2340 TGSI_FS_DEPTH_LAYOUT_LESS
);
2342 case FRAG_DEPTH_LAYOUT_UNCHANGED
:
2343 ureg_property(ureg
, TGSI_PROPERTY_FS_DEPTH_LAYOUT
,
2344 TGSI_FS_DEPTH_LAYOUT_UNCHANGED
);
2353 ureg_setup_compute_shader(struct ureg_program
*ureg
,
2354 const struct shader_info
*info
)
2356 ureg_property(ureg
, TGSI_PROPERTY_CS_FIXED_BLOCK_WIDTH
,
2357 info
->cs
.local_size
[0]);
2358 ureg_property(ureg
, TGSI_PROPERTY_CS_FIXED_BLOCK_HEIGHT
,
2359 info
->cs
.local_size
[1]);
2360 ureg_property(ureg
, TGSI_PROPERTY_CS_FIXED_BLOCK_DEPTH
,
2361 info
->cs
.local_size
[2]);
2363 if (info
->cs
.shared_size
)
2364 ureg_DECL_memory(ureg
, TGSI_MEMORY_TYPE_SHARED
);
2368 ureg_setup_shader_info(struct ureg_program
*ureg
,
2369 const struct shader_info
*info
)
2371 if (info
->layer_viewport_relative
)
2372 ureg_property(ureg
, TGSI_PROPERTY_LAYER_VIEWPORT_RELATIVE
, 1);
2374 switch (info
->stage
) {
2375 case MESA_SHADER_VERTEX
:
2376 ureg_setup_clipdist_info(ureg
, info
);
2377 ureg_set_next_shader_processor(ureg
, pipe_shader_type_from_mesa(info
->next_stage
));
2379 case MESA_SHADER_TESS_CTRL
:
2380 ureg_setup_tess_ctrl_shader(ureg
, info
);
2382 case MESA_SHADER_TESS_EVAL
:
2383 ureg_setup_tess_eval_shader(ureg
, info
);
2384 ureg_setup_clipdist_info(ureg
, info
);
2385 ureg_set_next_shader_processor(ureg
, pipe_shader_type_from_mesa(info
->next_stage
));
2387 case MESA_SHADER_GEOMETRY
:
2388 ureg_setup_geometry_shader(ureg
, info
);
2389 ureg_setup_clipdist_info(ureg
, info
);
2391 case MESA_SHADER_FRAGMENT
:
2392 ureg_setup_fragment_shader(ureg
, info
);
2394 case MESA_SHADER_COMPUTE
:
2395 ureg_setup_compute_shader(ureg
, info
);
2403 void ureg_destroy( struct ureg_program
*ureg
)
2407 for (i
= 0; i
< ARRAY_SIZE(ureg
->domain
); i
++) {
2408 if (ureg
->domain
[i
].tokens
&&
2409 ureg
->domain
[i
].tokens
!= error_tokens
)
2410 FREE(ureg
->domain
[i
].tokens
);
2413 util_bitmask_destroy(ureg
->free_temps
);
2414 util_bitmask_destroy(ureg
->local_temps
);
2415 util_bitmask_destroy(ureg
->decl_temps
);