1 /**************************************************************************
3 * Copyright 2009-2010 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
43 union tgsi_any_token
{
44 struct tgsi_header header
;
45 struct tgsi_processor processor
;
46 struct tgsi_token token
;
47 struct tgsi_property prop
;
48 struct tgsi_property_data prop_data
;
49 struct tgsi_declaration decl
;
50 struct tgsi_declaration_range decl_range
;
51 struct tgsi_declaration_dimension decl_dim
;
52 struct tgsi_declaration_interp decl_interp
;
53 struct tgsi_declaration_image decl_image
;
54 struct tgsi_declaration_semantic decl_semantic
;
55 struct tgsi_declaration_sampler_view decl_sampler_view
;
56 struct tgsi_declaration_array array
;
57 struct tgsi_immediate imm
;
58 union tgsi_immediate_data imm_data
;
59 struct tgsi_instruction insn
;
60 struct tgsi_instruction_label insn_label
;
61 struct tgsi_instruction_texture insn_texture
;
62 struct tgsi_instruction_memory insn_memory
;
63 struct tgsi_texture_offset insn_texture_offset
;
64 struct tgsi_src_register src
;
65 struct tgsi_ind_register ind
;
66 struct tgsi_dimension dim
;
67 struct tgsi_dst_register dst
;
73 union tgsi_any_token
*tokens
;
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
92 } constant_range
[UREG_MAX_CONSTANT_RANGE
];
93 unsigned nr_constant_ranges
;
96 struct hw_atomic_decl
{
101 } hw_atomic_range
[UREG_MAX_HW_ATOMIC_RANGE
];
102 unsigned nr_hw_atomic_ranges
;
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
111 bool supports_any_inout_decl_range
;
112 int next_shader_processor
;
115 enum tgsi_semantic semantic_name
;
116 unsigned semantic_index
;
117 enum tgsi_interpolate_mode interp
;
118 unsigned char cylindrical_wrap
;
119 unsigned char usage_mask
;
120 enum tgsi_interpolate_loc interp_location
;
124 } input
[UREG_MAX_INPUT
];
125 unsigned nr_inputs
, nr_input_regs
;
127 unsigned vs_inputs
[PIPE_MAX_ATTRIBS
/32];
130 enum tgsi_semantic semantic_name
;
131 unsigned semantic_index
;
132 } system_value
[UREG_MAX_SYSTEM_VALUE
];
133 unsigned nr_system_values
;
136 enum tgsi_semantic semantic_name
;
137 unsigned semantic_index
;
139 unsigned usage_mask
; /* = TGSI_WRITEMASK_* */
143 } output
[UREG_MAX_OUTPUT
];
144 unsigned nr_outputs
, nr_output_regs
;
154 } immediate
[UREG_MAX_IMMEDIATE
];
155 unsigned nr_immediates
;
157 struct ureg_src sampler
[PIPE_MAX_SAMPLERS
];
158 unsigned nr_samplers
;
162 enum tgsi_texture_type target
;
163 enum tgsi_return_type return_type_x
;
164 enum tgsi_return_type return_type_y
;
165 enum tgsi_return_type return_type_z
;
166 enum tgsi_return_type return_type_w
;
167 } sampler_view
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
168 unsigned nr_sampler_views
;
172 enum tgsi_texture_type target
;
176 } image
[PIPE_MAX_SHADER_IMAGES
];
182 } buffer
[PIPE_MAX_SHADER_BUFFERS
];
185 struct util_bitmask
*free_temps
;
186 struct util_bitmask
*local_temps
;
187 struct util_bitmask
*decl_temps
;
190 unsigned array_temps
[UREG_MAX_ARRAY_TEMPS
];
191 unsigned nr_array_temps
;
193 struct const_decl const_decls
[PIPE_MAX_CONSTANT_BUFFERS
];
195 struct hw_atomic_decl hw_atomic_decls
[PIPE_MAX_HW_ATOMIC_BUFFERS
];
197 unsigned properties
[TGSI_PROPERTY_COUNT
];
200 unsigned nr_instructions
;
202 struct ureg_tokens domain
[2];
204 bool use_memory
[TGSI_MEMORY_TYPE_COUNT
];
207 static union tgsi_any_token error_tokens
[32];
209 static void tokens_error( struct ureg_tokens
*tokens
)
211 if (tokens
->tokens
&& tokens
->tokens
!= error_tokens
)
212 FREE(tokens
->tokens
);
214 tokens
->tokens
= error_tokens
;
215 tokens
->size
= ARRAY_SIZE(error_tokens
);
220 static void tokens_expand( struct ureg_tokens
*tokens
,
223 unsigned old_size
= tokens
->size
* sizeof(unsigned);
225 if (tokens
->tokens
== error_tokens
) {
229 while (tokens
->count
+ count
> tokens
->size
) {
230 tokens
->size
= (1 << ++tokens
->order
);
233 tokens
->tokens
= REALLOC(tokens
->tokens
,
235 tokens
->size
* sizeof(unsigned));
236 if (tokens
->tokens
== NULL
) {
237 tokens_error(tokens
);
241 static void set_bad( struct ureg_program
*ureg
)
243 tokens_error(&ureg
->domain
[0]);
248 static union tgsi_any_token
*get_tokens( struct ureg_program
*ureg
,
252 struct ureg_tokens
*tokens
= &ureg
->domain
[domain
];
253 union tgsi_any_token
*result
;
255 if (tokens
->count
+ count
> tokens
->size
)
256 tokens_expand(tokens
, count
);
258 result
= &tokens
->tokens
[tokens
->count
];
259 tokens
->count
+= count
;
264 static union tgsi_any_token
*retrieve_token( struct ureg_program
*ureg
,
268 if (ureg
->domain
[domain
].tokens
== error_tokens
)
269 return &error_tokens
[0];
271 return &ureg
->domain
[domain
].tokens
[nr
];
276 ureg_property(struct ureg_program
*ureg
, unsigned name
, unsigned value
)
278 assert(name
< ARRAY_SIZE(ureg
->properties
));
279 ureg
->properties
[name
] = value
;
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program
*ureg
,
284 enum tgsi_semantic semantic_name
,
285 unsigned semantic_index
,
286 enum tgsi_interpolate_mode interp_mode
,
287 unsigned cylindrical_wrap
,
288 enum tgsi_interpolate_loc interp_location
,
296 assert(usage_mask
!= 0);
297 assert(usage_mask
<= TGSI_WRITEMASK_XYZW
);
299 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
300 if (ureg
->input
[i
].semantic_name
== semantic_name
&&
301 ureg
->input
[i
].semantic_index
== semantic_index
) {
302 assert(ureg
->input
[i
].interp
== interp_mode
);
303 assert(ureg
->input
[i
].cylindrical_wrap
== cylindrical_wrap
);
304 assert(ureg
->input
[i
].interp_location
== interp_location
);
305 if (ureg
->input
[i
].array_id
== array_id
) {
306 ureg
->input
[i
].usage_mask
|= usage_mask
;
309 assert((ureg
->input
[i
].usage_mask
& usage_mask
) == 0);
313 if (ureg
->nr_inputs
< UREG_MAX_INPUT
) {
314 assert(array_size
>= 1);
315 ureg
->input
[i
].semantic_name
= semantic_name
;
316 ureg
->input
[i
].semantic_index
= semantic_index
;
317 ureg
->input
[i
].interp
= interp_mode
;
318 ureg
->input
[i
].cylindrical_wrap
= cylindrical_wrap
;
319 ureg
->input
[i
].interp_location
= interp_location
;
320 ureg
->input
[i
].first
= index
;
321 ureg
->input
[i
].last
= index
+ array_size
- 1;
322 ureg
->input
[i
].array_id
= array_id
;
323 ureg
->input
[i
].usage_mask
= usage_mask
;
324 ureg
->nr_input_regs
= MAX2(ureg
->nr_input_regs
, index
+ array_size
);
331 return ureg_src_array_register(TGSI_FILE_INPUT
, ureg
->input
[i
].first
,
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program
*ureg
,
337 enum tgsi_semantic semantic_name
,
338 unsigned semantic_index
,
339 enum tgsi_interpolate_mode interp_mode
,
340 unsigned cylindrical_wrap
,
341 enum tgsi_interpolate_loc interp_location
,
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
346 semantic_name
, semantic_index
, interp_mode
,
347 cylindrical_wrap
, interp_location
,
348 ureg
->nr_input_regs
, TGSI_WRITEMASK_XYZW
, array_id
, array_size
);
353 ureg_DECL_vs_input( struct ureg_program
*ureg
,
356 assert(ureg
->processor
== PIPE_SHADER_VERTEX
);
357 assert(index
/ 32 < ARRAY_SIZE(ureg
->vs_inputs
));
359 ureg
->vs_inputs
[index
/32] |= 1 << (index
% 32);
360 return ureg_src_register( TGSI_FILE_INPUT
, index
);
365 ureg_DECL_input_layout(struct ureg_program
*ureg
,
366 enum tgsi_semantic semantic_name
,
367 unsigned semantic_index
,
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
374 semantic_name
, semantic_index
, 0, 0, 0,
375 index
, usage_mask
, array_id
, array_size
);
380 ureg_DECL_input(struct ureg_program
*ureg
,
381 enum tgsi_semantic semantic_name
,
382 unsigned semantic_index
,
386 return ureg_DECL_fs_input_cyl_centroid(ureg
, semantic_name
, semantic_index
,
387 0, 0, 0, array_id
, array_size
);
392 ureg_DECL_system_value(struct ureg_program
*ureg
,
393 enum tgsi_semantic semantic_name
,
394 unsigned semantic_index
)
398 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
399 if (ureg
->system_value
[i
].semantic_name
== semantic_name
&&
400 ureg
->system_value
[i
].semantic_index
== semantic_index
) {
405 if (ureg
->nr_system_values
< UREG_MAX_SYSTEM_VALUE
) {
406 ureg
->system_value
[ureg
->nr_system_values
].semantic_name
= semantic_name
;
407 ureg
->system_value
[ureg
->nr_system_values
].semantic_index
= semantic_index
;
408 i
= ureg
->nr_system_values
;
409 ureg
->nr_system_values
++;
415 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE
, i
);
420 ureg_DECL_output_layout(struct ureg_program
*ureg
,
421 enum tgsi_semantic semantic_name
,
422 unsigned semantic_index
,
431 assert(usage_mask
!= 0);
432 assert(!(streams
& 0x03) || (usage_mask
& 1));
433 assert(!(streams
& 0x0c) || (usage_mask
& 2));
434 assert(!(streams
& 0x30) || (usage_mask
& 4));
435 assert(!(streams
& 0xc0) || (usage_mask
& 8));
437 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
438 if (ureg
->output
[i
].semantic_name
== semantic_name
&&
439 ureg
->output
[i
].semantic_index
== semantic_index
) {
440 if (ureg
->output
[i
].array_id
== array_id
) {
441 ureg
->output
[i
].usage_mask
|= usage_mask
;
444 assert((ureg
->output
[i
].usage_mask
& usage_mask
) == 0);
448 if (ureg
->nr_outputs
< UREG_MAX_OUTPUT
) {
449 ureg
->output
[i
].semantic_name
= semantic_name
;
450 ureg
->output
[i
].semantic_index
= semantic_index
;
451 ureg
->output
[i
].usage_mask
= usage_mask
;
452 ureg
->output
[i
].first
= index
;
453 ureg
->output
[i
].last
= index
+ array_size
- 1;
454 ureg
->output
[i
].array_id
= array_id
;
455 ureg
->nr_output_regs
= MAX2(ureg
->nr_output_regs
, index
+ array_size
);
464 ureg
->output
[i
].streams
|= streams
;
466 return ureg_dst_array_register(TGSI_FILE_OUTPUT
, ureg
->output
[i
].first
,
472 ureg_DECL_output_masked(struct ureg_program
*ureg
,
479 return ureg_DECL_output_layout(ureg
, name
, index
, 0,
480 ureg
->nr_output_regs
, usage_mask
, array_id
, array_size
);
485 ureg_DECL_output(struct ureg_program
*ureg
,
489 return ureg_DECL_output_masked(ureg
, name
, index
, TGSI_WRITEMASK_XYZW
,
494 ureg_DECL_output_array(struct ureg_program
*ureg
,
495 enum tgsi_semantic semantic_name
,
496 unsigned semantic_index
,
500 return ureg_DECL_output_masked(ureg
, semantic_name
, semantic_index
,
502 array_id
, array_size
);
506 /* Returns a new constant register. Keep track of which have been
507 * referred to so that we can emit decls later.
509 * Constant operands declared with this function must be addressed
510 * with a two-dimensional index.
512 * There is nothing in this code to bind this constant to any tracked
513 * value or manage any constant_buffer contents -- that's the
514 * resposibility of the calling code.
517 ureg_DECL_constant2D(struct ureg_program
*ureg
,
522 struct const_decl
*decl
= &ureg
->const_decls
[index2D
];
524 assert(index2D
< PIPE_MAX_CONSTANT_BUFFERS
);
526 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
527 uint i
= decl
->nr_constant_ranges
++;
529 decl
->constant_range
[i
].first
= first
;
530 decl
->constant_range
[i
].last
= last
;
535 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
537 * Constant operands declared with this function must be addressed
538 * with a one-dimensional index.
541 ureg_DECL_constant(struct ureg_program
*ureg
,
544 struct const_decl
*decl
= &ureg
->const_decls
[0];
545 unsigned minconst
= index
, maxconst
= index
;
548 /* Inside existing range?
550 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
551 if (decl
->constant_range
[i
].first
<= index
&&
552 decl
->constant_range
[i
].last
>= index
) {
557 /* Extend existing range?
559 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
560 if (decl
->constant_range
[i
].last
== index
- 1) {
561 decl
->constant_range
[i
].last
= index
;
565 if (decl
->constant_range
[i
].first
== index
+ 1) {
566 decl
->constant_range
[i
].first
= index
;
570 minconst
= MIN2(minconst
, decl
->constant_range
[i
].first
);
571 maxconst
= MAX2(maxconst
, decl
->constant_range
[i
].last
);
576 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
577 i
= decl
->nr_constant_ranges
++;
578 decl
->constant_range
[i
].first
= index
;
579 decl
->constant_range
[i
].last
= index
;
583 /* Collapse all ranges down to one:
586 decl
->constant_range
[0].first
= minconst
;
587 decl
->constant_range
[0].last
= maxconst
;
588 decl
->nr_constant_ranges
= 1;
591 assert(i
< decl
->nr_constant_ranges
);
592 assert(decl
->constant_range
[i
].first
<= index
);
593 assert(decl
->constant_range
[i
].last
>= index
);
595 struct ureg_src src
= ureg_src_register(TGSI_FILE_CONSTANT
, index
);
596 return ureg_src_dimension(src
, 0);
600 /* Returns a new hw atomic register. Keep track of which have been
601 * referred to so that we can emit decls later.
604 ureg_DECL_hw_atomic(struct ureg_program
*ureg
,
610 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[buffer_id
];
612 if (decl
->nr_hw_atomic_ranges
< UREG_MAX_HW_ATOMIC_RANGE
) {
613 uint i
= decl
->nr_hw_atomic_ranges
++;
615 decl
->hw_atomic_range
[i
].first
= first
;
616 decl
->hw_atomic_range
[i
].last
= last
;
617 decl
->hw_atomic_range
[i
].array_id
= array_id
;
623 static struct ureg_dst
alloc_temporary( struct ureg_program
*ureg
,
628 /* Look for a released temporary.
630 for (i
= util_bitmask_get_first_index(ureg
->free_temps
);
631 i
!= UTIL_BITMASK_INVALID_INDEX
;
632 i
= util_bitmask_get_next_index(ureg
->free_temps
, i
+ 1)) {
633 if (util_bitmask_get(ureg
->local_temps
, i
) == local
)
637 /* Or allocate a new one.
639 if (i
== UTIL_BITMASK_INVALID_INDEX
) {
640 i
= ureg
->nr_temps
++;
643 util_bitmask_set(ureg
->local_temps
, i
);
645 /* Start a new declaration when the local flag changes */
646 if (!i
|| util_bitmask_get(ureg
->local_temps
, i
- 1) != local
)
647 util_bitmask_set(ureg
->decl_temps
, i
);
650 util_bitmask_clear(ureg
->free_temps
, i
);
652 return ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
655 struct ureg_dst
ureg_DECL_temporary( struct ureg_program
*ureg
)
657 return alloc_temporary(ureg
, FALSE
);
660 struct ureg_dst
ureg_DECL_local_temporary( struct ureg_program
*ureg
)
662 return alloc_temporary(ureg
, TRUE
);
665 struct ureg_dst
ureg_DECL_array_temporary( struct ureg_program
*ureg
,
669 unsigned i
= ureg
->nr_temps
;
670 struct ureg_dst dst
= ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
673 util_bitmask_set(ureg
->local_temps
, i
);
675 /* Always start a new declaration at the start */
676 util_bitmask_set(ureg
->decl_temps
, i
);
678 ureg
->nr_temps
+= size
;
680 /* and also at the end of the array */
681 util_bitmask_set(ureg
->decl_temps
, ureg
->nr_temps
);
683 if (ureg
->nr_array_temps
< UREG_MAX_ARRAY_TEMPS
) {
684 ureg
->array_temps
[ureg
->nr_array_temps
++] = i
;
685 dst
.ArrayID
= ureg
->nr_array_temps
;
691 void ureg_release_temporary( struct ureg_program
*ureg
,
692 struct ureg_dst tmp
)
694 if(tmp
.File
== TGSI_FILE_TEMPORARY
)
695 util_bitmask_set(ureg
->free_temps
, tmp
.Index
);
699 /* Allocate a new address register.
701 struct ureg_dst
ureg_DECL_address( struct ureg_program
*ureg
)
703 if (ureg
->nr_addrs
< UREG_MAX_ADDR
)
704 return ureg_dst_register( TGSI_FILE_ADDRESS
, ureg
->nr_addrs
++ );
707 return ureg_dst_register( TGSI_FILE_ADDRESS
, 0 );
710 /* Allocate a new sampler.
712 struct ureg_src
ureg_DECL_sampler( struct ureg_program
*ureg
,
717 for (i
= 0; i
< ureg
->nr_samplers
; i
++)
718 if (ureg
->sampler
[i
].Index
== nr
)
719 return ureg
->sampler
[i
];
721 if (i
< PIPE_MAX_SAMPLERS
) {
722 ureg
->sampler
[i
] = ureg_src_register( TGSI_FILE_SAMPLER
, nr
);
724 return ureg
->sampler
[i
];
728 return ureg
->sampler
[0];
732 * Allocate a new shader sampler view.
735 ureg_DECL_sampler_view(struct ureg_program
*ureg
,
737 enum tgsi_texture_type target
,
738 enum tgsi_return_type return_type_x
,
739 enum tgsi_return_type return_type_y
,
740 enum tgsi_return_type return_type_z
,
741 enum tgsi_return_type return_type_w
)
743 struct ureg_src reg
= ureg_src_register(TGSI_FILE_SAMPLER_VIEW
, index
);
746 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
747 if (ureg
->sampler_view
[i
].index
== index
) {
752 if (i
< PIPE_MAX_SHADER_SAMPLER_VIEWS
) {
753 ureg
->sampler_view
[i
].index
= index
;
754 ureg
->sampler_view
[i
].target
= target
;
755 ureg
->sampler_view
[i
].return_type_x
= return_type_x
;
756 ureg
->sampler_view
[i
].return_type_y
= return_type_y
;
757 ureg
->sampler_view
[i
].return_type_z
= return_type_z
;
758 ureg
->sampler_view
[i
].return_type_w
= return_type_w
;
759 ureg
->nr_sampler_views
++;
767 /* Allocate a new image.
770 ureg_DECL_image(struct ureg_program
*ureg
,
772 enum tgsi_texture_type target
,
777 struct ureg_src reg
= ureg_src_register(TGSI_FILE_IMAGE
, index
);
780 for (i
= 0; i
< ureg
->nr_images
; i
++)
781 if (ureg
->image
[i
].index
== index
)
784 if (i
< PIPE_MAX_SHADER_IMAGES
) {
785 ureg
->image
[i
].index
= index
;
786 ureg
->image
[i
].target
= target
;
787 ureg
->image
[i
].wr
= wr
;
788 ureg
->image
[i
].raw
= raw
;
789 ureg
->image
[i
].format
= format
;
798 /* Allocate a new buffer.
800 struct ureg_src
ureg_DECL_buffer(struct ureg_program
*ureg
, unsigned nr
,
803 struct ureg_src reg
= ureg_src_register(TGSI_FILE_BUFFER
, nr
);
806 for (i
= 0; i
< ureg
->nr_buffers
; i
++)
807 if (ureg
->buffer
[i
].index
== nr
)
810 if (i
< PIPE_MAX_SHADER_BUFFERS
) {
811 ureg
->buffer
[i
].index
= nr
;
812 ureg
->buffer
[i
].atomic
= atomic
;
821 /* Allocate a memory area.
823 struct ureg_src
ureg_DECL_memory(struct ureg_program
*ureg
,
824 unsigned memory_type
)
826 struct ureg_src reg
= ureg_src_register(TGSI_FILE_MEMORY
, memory_type
);
828 ureg
->use_memory
[memory_type
] = true;
833 match_or_expand_immediate64( const unsigned *v
,
840 unsigned nr2
= *pnr2
;
844 for (i
= 0; i
< nr
; i
+= 2) {
845 boolean found
= FALSE
;
847 for (j
= 0; j
< nr2
&& !found
; j
+= 2) {
848 if (v
[i
] == v2
[j
] && v
[i
+ 1] == v2
[j
+ 1]) {
849 *swizzle
|= (j
<< (i
* 2)) | ((j
+ 1) << ((i
+ 1) * 2));
859 v2
[nr2
+ 1] = v
[i
+ 1];
861 *swizzle
|= (nr2
<< (i
* 2)) | ((nr2
+ 1) << ((i
+ 1) * 2));
866 /* Actually expand immediate only when fully succeeded.
873 match_or_expand_immediate( const unsigned *v
,
880 unsigned nr2
= *pnr2
;
883 if (type
== TGSI_IMM_FLOAT64
||
884 type
== TGSI_IMM_UINT64
||
885 type
== TGSI_IMM_INT64
)
886 return match_or_expand_immediate64(v
, type
, nr
, v2
, pnr2
, swizzle
);
890 for (i
= 0; i
< nr
; i
++) {
891 boolean found
= FALSE
;
893 for (j
= 0; j
< nr2
&& !found
; j
++) {
895 *swizzle
|= j
<< (i
* 2);
906 *swizzle
|= nr2
<< (i
* 2);
911 /* Actually expand immediate only when fully succeeded.
918 static struct ureg_src
919 decl_immediate( struct ureg_program
*ureg
,
925 unsigned swizzle
= 0;
927 /* Could do a first pass where we examine all existing immediates
931 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
932 if (ureg
->immediate
[i
].type
!= type
) {
935 if (match_or_expand_immediate(v
,
938 ureg
->immediate
[i
].value
.u
,
939 &ureg
->immediate
[i
].nr
,
945 if (ureg
->nr_immediates
< UREG_MAX_IMMEDIATE
) {
946 i
= ureg
->nr_immediates
++;
947 ureg
->immediate
[i
].type
= type
;
948 if (match_or_expand_immediate(v
,
951 ureg
->immediate
[i
].value
.u
,
952 &ureg
->immediate
[i
].nr
,
961 /* Make sure that all referenced elements are from this immediate.
962 * Has the effect of making size-one immediates into scalars.
964 if (type
== TGSI_IMM_FLOAT64
||
965 type
== TGSI_IMM_UINT64
||
966 type
== TGSI_IMM_INT64
) {
967 for (j
= nr
; j
< 4; j
+=2) {
968 swizzle
|= (swizzle
& 0xf) << (j
* 2);
971 for (j
= nr
; j
< 4; j
++) {
972 swizzle
|= (swizzle
& 0x3) << (j
* 2);
975 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE
, i
),
976 (swizzle
>> 0) & 0x3,
977 (swizzle
>> 2) & 0x3,
978 (swizzle
>> 4) & 0x3,
979 (swizzle
>> 6) & 0x3);
984 ureg_DECL_immediate( struct ureg_program
*ureg
,
994 for (i
= 0; i
< nr
; i
++) {
998 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT32
);
1002 ureg_DECL_immediate_f64( struct ureg_program
*ureg
,
1012 assert((nr
/ 2) < 3);
1013 for (i
= 0; i
< nr
/ 2; i
++) {
1017 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT64
);
1021 ureg_DECL_immediate_uint( struct ureg_program
*ureg
,
1025 return decl_immediate(ureg
, v
, nr
, TGSI_IMM_UINT32
);
1030 ureg_DECL_immediate_block_uint( struct ureg_program
*ureg
,
1037 if (ureg
->nr_immediates
+ (nr
+ 3) / 4 > UREG_MAX_IMMEDIATE
) {
1039 return ureg_src_register(TGSI_FILE_IMMEDIATE
, 0);
1042 index
= ureg
->nr_immediates
;
1043 ureg
->nr_immediates
+= (nr
+ 3) / 4;
1045 for (i
= index
; i
< ureg
->nr_immediates
; i
++) {
1046 ureg
->immediate
[i
].type
= TGSI_IMM_UINT32
;
1047 ureg
->immediate
[i
].nr
= nr
> 4 ? 4 : nr
;
1048 memcpy(ureg
->immediate
[i
].value
.u
,
1049 &v
[(i
- index
) * 4],
1050 ureg
->immediate
[i
].nr
* sizeof(uint
));
1054 return ureg_src_register(TGSI_FILE_IMMEDIATE
, index
);
1059 ureg_DECL_immediate_int( struct ureg_program
*ureg
,
1063 return decl_immediate(ureg
, (const unsigned *)v
, nr
, TGSI_IMM_INT32
);
1067 ureg_DECL_immediate_uint64( struct ureg_program
*ureg
,
1077 assert((nr
/ 2) < 3);
1078 for (i
= 0; i
< nr
/ 2; i
++) {
1082 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_UINT64
);
1086 ureg_DECL_immediate_int64( struct ureg_program
*ureg
,
1096 assert((nr
/ 2) < 3);
1097 for (i
= 0; i
< nr
/ 2; i
++) {
1101 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_INT64
);
1105 ureg_emit_src( struct ureg_program
*ureg
,
1106 struct ureg_src src
)
1108 unsigned size
= 1 + (src
.Indirect
? 1 : 0) +
1109 (src
.Dimension
? (src
.DimIndirect
? 2 : 1) : 0);
1111 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1114 assert(src
.File
!= TGSI_FILE_NULL
);
1115 assert(src
.File
< TGSI_FILE_COUNT
);
1118 out
[n
].src
.File
= src
.File
;
1119 out
[n
].src
.SwizzleX
= src
.SwizzleX
;
1120 out
[n
].src
.SwizzleY
= src
.SwizzleY
;
1121 out
[n
].src
.SwizzleZ
= src
.SwizzleZ
;
1122 out
[n
].src
.SwizzleW
= src
.SwizzleW
;
1123 out
[n
].src
.Index
= src
.Index
;
1124 out
[n
].src
.Negate
= src
.Negate
;
1125 out
[0].src
.Absolute
= src
.Absolute
;
1129 out
[0].src
.Indirect
= 1;
1131 out
[n
].ind
.File
= src
.IndirectFile
;
1132 out
[n
].ind
.Swizzle
= src
.IndirectSwizzle
;
1133 out
[n
].ind
.Index
= src
.IndirectIndex
;
1134 if (!ureg
->supports_any_inout_decl_range
&&
1135 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1136 out
[n
].ind
.ArrayID
= 0;
1138 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1142 if (src
.Dimension
) {
1143 out
[0].src
.Dimension
= 1;
1144 out
[n
].dim
.Dimension
= 0;
1145 out
[n
].dim
.Padding
= 0;
1146 if (src
.DimIndirect
) {
1147 out
[n
].dim
.Indirect
= 1;
1148 out
[n
].dim
.Index
= src
.DimensionIndex
;
1151 out
[n
].ind
.File
= src
.DimIndFile
;
1152 out
[n
].ind
.Swizzle
= src
.DimIndSwizzle
;
1153 out
[n
].ind
.Index
= src
.DimIndIndex
;
1154 if (!ureg
->supports_any_inout_decl_range
&&
1155 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1156 out
[n
].ind
.ArrayID
= 0;
1158 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1160 out
[n
].dim
.Indirect
= 0;
1161 out
[n
].dim
.Index
= src
.DimensionIndex
;
1171 ureg_emit_dst( struct ureg_program
*ureg
,
1172 struct ureg_dst dst
)
1174 unsigned size
= 1 + (dst
.Indirect
? 1 : 0) +
1175 (dst
.Dimension
? (dst
.DimIndirect
? 2 : 1) : 0);
1177 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1180 assert(dst
.File
!= TGSI_FILE_NULL
);
1181 assert(dst
.File
!= TGSI_FILE_SAMPLER
);
1182 assert(dst
.File
!= TGSI_FILE_SAMPLER_VIEW
);
1183 assert(dst
.File
!= TGSI_FILE_IMMEDIATE
);
1184 assert(dst
.File
< TGSI_FILE_COUNT
);
1187 out
[n
].dst
.File
= dst
.File
;
1188 out
[n
].dst
.WriteMask
= dst
.WriteMask
;
1189 out
[n
].dst
.Indirect
= dst
.Indirect
;
1190 out
[n
].dst
.Index
= dst
.Index
;
1195 out
[n
].ind
.File
= dst
.IndirectFile
;
1196 out
[n
].ind
.Swizzle
= dst
.IndirectSwizzle
;
1197 out
[n
].ind
.Index
= dst
.IndirectIndex
;
1198 if (!ureg
->supports_any_inout_decl_range
&&
1199 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1200 out
[n
].ind
.ArrayID
= 0;
1202 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1206 if (dst
.Dimension
) {
1207 out
[0].dst
.Dimension
= 1;
1208 out
[n
].dim
.Dimension
= 0;
1209 out
[n
].dim
.Padding
= 0;
1210 if (dst
.DimIndirect
) {
1211 out
[n
].dim
.Indirect
= 1;
1212 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1215 out
[n
].ind
.File
= dst
.DimIndFile
;
1216 out
[n
].ind
.Swizzle
= dst
.DimIndSwizzle
;
1217 out
[n
].ind
.Index
= dst
.DimIndIndex
;
1218 if (!ureg
->supports_any_inout_decl_range
&&
1219 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1220 out
[n
].ind
.ArrayID
= 0;
1222 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1224 out
[n
].dim
.Indirect
= 0;
1225 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1234 static void validate( unsigned opcode
,
1239 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info( opcode
);
1242 assert(nr_dst
== info
->num_dst
);
1243 assert(nr_src
== info
->num_src
);
1248 struct ureg_emit_insn_result
1249 ureg_emit_insn(struct ureg_program
*ureg
,
1256 union tgsi_any_token
*out
;
1258 struct ureg_emit_insn_result result
;
1260 validate( opcode
, num_dst
, num_src
);
1262 out
= get_tokens( ureg
, DOMAIN_INSN
, count
);
1263 out
[0].insn
= tgsi_default_instruction();
1264 out
[0].insn
.Opcode
= opcode
;
1265 out
[0].insn
.Saturate
= saturate
;
1266 out
[0].insn
.Precise
= precise
;
1267 out
[0].insn
.NumDstRegs
= num_dst
;
1268 out
[0].insn
.NumSrcRegs
= num_src
;
1270 result
.insn_token
= ureg
->domain
[DOMAIN_INSN
].count
- count
;
1271 result
.extended_token
= result
.insn_token
;
1273 ureg
->nr_instructions
++;
1280 * Emit a label token.
1281 * \param label_token returns a token number indicating where the label
1282 * needs to be patched later. Later, this value should be passed to the
1283 * ureg_fixup_label() function.
1286 ureg_emit_label(struct ureg_program
*ureg
,
1287 unsigned extended_token
,
1288 unsigned *label_token
)
1290 union tgsi_any_token
*out
, *insn
;
1295 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1298 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1299 insn
->insn
.Label
= 1;
1301 *label_token
= ureg
->domain
[DOMAIN_INSN
].count
- 1;
1304 /* Will return a number which can be used in a label to point to the
1305 * next instruction to be emitted.
1308 ureg_get_instruction_number( struct ureg_program
*ureg
)
1310 return ureg
->nr_instructions
;
1313 /* Patch a given label (expressed as a token number) to point to a
1314 * given instruction (expressed as an instruction number).
1317 ureg_fixup_label(struct ureg_program
*ureg
,
1318 unsigned label_token
,
1319 unsigned instruction_number
)
1321 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, label_token
);
1323 out
->insn_label
.Label
= instruction_number
;
1328 ureg_emit_texture(struct ureg_program
*ureg
,
1329 unsigned extended_token
,
1330 enum tgsi_texture_type target
,
1331 enum tgsi_return_type return_type
, unsigned num_offsets
)
1333 union tgsi_any_token
*out
, *insn
;
1335 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1336 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1338 insn
->insn
.Texture
= 1;
1341 out
[0].insn_texture
.Texture
= target
;
1342 out
[0].insn_texture
.NumOffsets
= num_offsets
;
1343 out
[0].insn_texture
.ReturnType
= return_type
;
1347 ureg_emit_texture_offset(struct ureg_program
*ureg
,
1348 const struct tgsi_texture_offset
*offset
)
1350 union tgsi_any_token
*out
;
1352 out
= get_tokens( ureg
, DOMAIN_INSN
, 1);
1355 out
[0].insn_texture_offset
= *offset
;
1360 ureg_emit_memory(struct ureg_program
*ureg
,
1361 unsigned extended_token
,
1366 union tgsi_any_token
*out
, *insn
;
1368 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1369 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1371 insn
->insn
.Memory
= 1;
1374 out
[0].insn_memory
.Qualifier
= qualifier
;
1375 out
[0].insn_memory
.Texture
= texture
;
1376 out
[0].insn_memory
.Format
= format
;
1380 ureg_fixup_insn_size(struct ureg_program
*ureg
,
1383 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, insn
);
1385 assert(out
->insn
.Type
== TGSI_TOKEN_TYPE_INSTRUCTION
);
1386 out
->insn
.NrTokens
= ureg
->domain
[DOMAIN_INSN
].count
- insn
- 1;
1391 ureg_insn(struct ureg_program
*ureg
,
1393 const struct ureg_dst
*dst
,
1395 const struct ureg_src
*src
,
1399 struct ureg_emit_insn_result insn
;
1403 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1407 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1409 insn
= ureg_emit_insn(ureg
,
1416 for (i
= 0; i
< nr_dst
; i
++)
1417 ureg_emit_dst( ureg
, dst
[i
] );
1419 for (i
= 0; i
< nr_src
; i
++)
1420 ureg_emit_src( ureg
, src
[i
] );
1422 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1426 ureg_tex_insn(struct ureg_program
*ureg
,
1428 const struct ureg_dst
*dst
,
1430 enum tgsi_texture_type target
,
1431 enum tgsi_return_type return_type
,
1432 const struct tgsi_texture_offset
*texoffsets
,
1434 const struct ureg_src
*src
,
1437 struct ureg_emit_insn_result insn
;
1441 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1445 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1447 insn
= ureg_emit_insn(ureg
,
1454 ureg_emit_texture( ureg
, insn
.extended_token
, target
, return_type
,
1457 for (i
= 0; i
< nr_offset
; i
++)
1458 ureg_emit_texture_offset( ureg
, &texoffsets
[i
]);
1460 for (i
= 0; i
< nr_dst
; i
++)
1461 ureg_emit_dst( ureg
, dst
[i
] );
1463 for (i
= 0; i
< nr_src
; i
++)
1464 ureg_emit_src( ureg
, src
[i
] );
1466 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1471 ureg_memory_insn(struct ureg_program
*ureg
,
1473 const struct ureg_dst
*dst
,
1475 const struct ureg_src
*src
,
1481 struct ureg_emit_insn_result insn
;
1484 insn
= ureg_emit_insn(ureg
,
1491 ureg_emit_memory(ureg
, insn
.extended_token
, qualifier
, texture
, format
);
1493 for (i
= 0; i
< nr_dst
; i
++)
1494 ureg_emit_dst(ureg
, dst
[i
]);
1496 for (i
= 0; i
< nr_src
; i
++)
1497 ureg_emit_src(ureg
, src
[i
]);
1499 ureg_fixup_insn_size(ureg
, insn
.insn_token
);
1504 emit_decl_semantic(struct ureg_program
*ureg
,
1508 enum tgsi_semantic semantic_name
,
1509 unsigned semantic_index
,
1511 unsigned usage_mask
,
1514 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1517 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1518 out
[0].decl
.NrTokens
= 3;
1519 out
[0].decl
.File
= file
;
1520 out
[0].decl
.UsageMask
= usage_mask
;
1521 out
[0].decl
.Semantic
= 1;
1522 out
[0].decl
.Array
= array_id
!= 0;
1525 out
[1].decl_range
.First
= first
;
1526 out
[1].decl_range
.Last
= last
;
1529 out
[2].decl_semantic
.Name
= semantic_name
;
1530 out
[2].decl_semantic
.Index
= semantic_index
;
1531 out
[2].decl_semantic
.StreamX
= streams
& 3;
1532 out
[2].decl_semantic
.StreamY
= (streams
>> 2) & 3;
1533 out
[2].decl_semantic
.StreamZ
= (streams
>> 4) & 3;
1534 out
[2].decl_semantic
.StreamW
= (streams
>> 6) & 3;
1538 out
[3].array
.ArrayID
= array_id
;
1543 emit_decl_atomic_2d(struct ureg_program
*ureg
,
1549 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1552 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1553 out
[0].decl
.NrTokens
= 3;
1554 out
[0].decl
.File
= TGSI_FILE_HW_ATOMIC
;
1555 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1556 out
[0].decl
.Dimension
= 1;
1557 out
[0].decl
.Array
= array_id
!= 0;
1560 out
[1].decl_range
.First
= first
;
1561 out
[1].decl_range
.Last
= last
;
1564 out
[2].decl_dim
.Index2D
= index2D
;
1568 out
[3].array
.ArrayID
= array_id
;
1573 emit_decl_fs(struct ureg_program
*ureg
,
1577 enum tgsi_semantic semantic_name
,
1578 unsigned semantic_index
,
1579 enum tgsi_interpolate_mode interpolate
,
1580 unsigned cylindrical_wrap
,
1581 enum tgsi_interpolate_loc interpolate_location
,
1583 unsigned usage_mask
)
1585 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
,
1589 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1590 out
[0].decl
.NrTokens
= 4;
1591 out
[0].decl
.File
= file
;
1592 out
[0].decl
.UsageMask
= usage_mask
;
1593 out
[0].decl
.Interpolate
= 1;
1594 out
[0].decl
.Semantic
= 1;
1595 out
[0].decl
.Array
= array_id
!= 0;
1598 out
[1].decl_range
.First
= first
;
1599 out
[1].decl_range
.Last
= last
;
1602 out
[2].decl_interp
.Interpolate
= interpolate
;
1603 out
[2].decl_interp
.CylindricalWrap
= cylindrical_wrap
;
1604 out
[2].decl_interp
.Location
= interpolate_location
;
1607 out
[3].decl_semantic
.Name
= semantic_name
;
1608 out
[3].decl_semantic
.Index
= semantic_index
;
1612 out
[4].array
.ArrayID
= array_id
;
1617 emit_decl_temps( struct ureg_program
*ureg
,
1618 unsigned first
, unsigned last
,
1622 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
,
1626 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1627 out
[0].decl
.NrTokens
= 2;
1628 out
[0].decl
.File
= TGSI_FILE_TEMPORARY
;
1629 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1630 out
[0].decl
.Local
= local
;
1633 out
[1].decl_range
.First
= first
;
1634 out
[1].decl_range
.Last
= last
;
1637 out
[0].decl
.Array
= 1;
1639 out
[2].array
.ArrayID
= arrayid
;
1643 static void emit_decl_range( struct ureg_program
*ureg
,
1648 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
1651 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1652 out
[0].decl
.NrTokens
= 2;
1653 out
[0].decl
.File
= file
;
1654 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1655 out
[0].decl
.Semantic
= 0;
1658 out
[1].decl_range
.First
= first
;
1659 out
[1].decl_range
.Last
= first
+ count
- 1;
1663 emit_decl_range2D(struct ureg_program
*ureg
,
1669 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1672 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1673 out
[0].decl
.NrTokens
= 3;
1674 out
[0].decl
.File
= file
;
1675 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1676 out
[0].decl
.Dimension
= 1;
1679 out
[1].decl_range
.First
= first
;
1680 out
[1].decl_range
.Last
= last
;
1683 out
[2].decl_dim
.Index2D
= index2D
;
1687 emit_decl_sampler_view(struct ureg_program
*ureg
,
1689 enum tgsi_texture_type target
,
1690 enum tgsi_return_type return_type_x
,
1691 enum tgsi_return_type return_type_y
,
1692 enum tgsi_return_type return_type_z
,
1693 enum tgsi_return_type return_type_w
)
1695 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1698 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1699 out
[0].decl
.NrTokens
= 3;
1700 out
[0].decl
.File
= TGSI_FILE_SAMPLER_VIEW
;
1701 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1704 out
[1].decl_range
.First
= index
;
1705 out
[1].decl_range
.Last
= index
;
1708 out
[2].decl_sampler_view
.Resource
= target
;
1709 out
[2].decl_sampler_view
.ReturnTypeX
= return_type_x
;
1710 out
[2].decl_sampler_view
.ReturnTypeY
= return_type_y
;
1711 out
[2].decl_sampler_view
.ReturnTypeZ
= return_type_z
;
1712 out
[2].decl_sampler_view
.ReturnTypeW
= return_type_w
;
1716 emit_decl_image(struct ureg_program
*ureg
,
1718 enum tgsi_texture_type target
,
1723 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1726 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1727 out
[0].decl
.NrTokens
= 3;
1728 out
[0].decl
.File
= TGSI_FILE_IMAGE
;
1729 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1732 out
[1].decl_range
.First
= index
;
1733 out
[1].decl_range
.Last
= index
;
1736 out
[2].decl_image
.Resource
= target
;
1737 out
[2].decl_image
.Writable
= wr
;
1738 out
[2].decl_image
.Raw
= raw
;
1739 out
[2].decl_image
.Format
= format
;
1743 emit_decl_buffer(struct ureg_program
*ureg
,
1747 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1750 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1751 out
[0].decl
.NrTokens
= 2;
1752 out
[0].decl
.File
= TGSI_FILE_BUFFER
;
1753 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1754 out
[0].decl
.Atomic
= atomic
;
1757 out
[1].decl_range
.First
= index
;
1758 out
[1].decl_range
.Last
= index
;
1762 emit_decl_memory(struct ureg_program
*ureg
, unsigned memory_type
)
1764 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1767 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1768 out
[0].decl
.NrTokens
= 2;
1769 out
[0].decl
.File
= TGSI_FILE_MEMORY
;
1770 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1771 out
[0].decl
.MemType
= memory_type
;
1774 out
[1].decl_range
.First
= memory_type
;
1775 out
[1].decl_range
.Last
= memory_type
;
1779 emit_immediate( struct ureg_program
*ureg
,
1783 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 5 );
1786 out
[0].imm
.Type
= TGSI_TOKEN_TYPE_IMMEDIATE
;
1787 out
[0].imm
.NrTokens
= 5;
1788 out
[0].imm
.DataType
= type
;
1789 out
[0].imm
.Padding
= 0;
1791 out
[1].imm_data
.Uint
= v
[0];
1792 out
[2].imm_data
.Uint
= v
[1];
1793 out
[3].imm_data
.Uint
= v
[2];
1794 out
[4].imm_data
.Uint
= v
[3];
1798 emit_property(struct ureg_program
*ureg
,
1802 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1805 out
[0].prop
.Type
= TGSI_TOKEN_TYPE_PROPERTY
;
1806 out
[0].prop
.NrTokens
= 2;
1807 out
[0].prop
.PropertyName
= name
;
1809 out
[1].prop_data
.Data
= data
;
1813 static void emit_decls( struct ureg_program
*ureg
)
1817 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
1818 if (ureg
->properties
[i
] != ~0)
1819 emit_property(ureg
, i
, ureg
->properties
[i
]);
1821 if (ureg
->processor
== PIPE_SHADER_VERTEX
) {
1822 for (i
= 0; i
< PIPE_MAX_ATTRIBS
; i
++) {
1823 if (ureg
->vs_inputs
[i
/32] & (1u << (i
%32))) {
1824 emit_decl_range( ureg
, TGSI_FILE_INPUT
, i
, 1 );
1827 } else if (ureg
->processor
== PIPE_SHADER_FRAGMENT
) {
1828 if (ureg
->supports_any_inout_decl_range
) {
1829 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1832 ureg
->input
[i
].first
,
1833 ureg
->input
[i
].last
,
1834 ureg
->input
[i
].semantic_name
,
1835 ureg
->input
[i
].semantic_index
,
1836 ureg
->input
[i
].interp
,
1837 ureg
->input
[i
].cylindrical_wrap
,
1838 ureg
->input
[i
].interp_location
,
1839 ureg
->input
[i
].array_id
,
1840 ureg
->input
[i
].usage_mask
);
1844 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1845 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1849 ureg
->input
[i
].semantic_name
,
1850 ureg
->input
[i
].semantic_index
+
1851 (j
- ureg
->input
[i
].first
),
1852 ureg
->input
[i
].interp
,
1853 ureg
->input
[i
].cylindrical_wrap
,
1854 ureg
->input
[i
].interp_location
, 0,
1855 ureg
->input
[i
].usage_mask
);
1860 if (ureg
->supports_any_inout_decl_range
) {
1861 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1862 emit_decl_semantic(ureg
,
1864 ureg
->input
[i
].first
,
1865 ureg
->input
[i
].last
,
1866 ureg
->input
[i
].semantic_name
,
1867 ureg
->input
[i
].semantic_index
,
1869 TGSI_WRITEMASK_XYZW
,
1870 ureg
->input
[i
].array_id
);
1874 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1875 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1876 emit_decl_semantic(ureg
,
1879 ureg
->input
[i
].semantic_name
,
1880 ureg
->input
[i
].semantic_index
+
1881 (j
- ureg
->input
[i
].first
),
1883 TGSI_WRITEMASK_XYZW
, 0);
1889 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
1890 emit_decl_semantic(ureg
,
1891 TGSI_FILE_SYSTEM_VALUE
,
1894 ureg
->system_value
[i
].semantic_name
,
1895 ureg
->system_value
[i
].semantic_index
,
1897 TGSI_WRITEMASK_XYZW
, 0);
1900 if (ureg
->supports_any_inout_decl_range
) {
1901 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1902 emit_decl_semantic(ureg
,
1904 ureg
->output
[i
].first
,
1905 ureg
->output
[i
].last
,
1906 ureg
->output
[i
].semantic_name
,
1907 ureg
->output
[i
].semantic_index
,
1908 ureg
->output
[i
].streams
,
1909 ureg
->output
[i
].usage_mask
,
1910 ureg
->output
[i
].array_id
);
1914 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1915 for (j
= ureg
->output
[i
].first
; j
<= ureg
->output
[i
].last
; j
++) {
1916 emit_decl_semantic(ureg
,
1919 ureg
->output
[i
].semantic_name
,
1920 ureg
->output
[i
].semantic_index
+
1921 (j
- ureg
->output
[i
].first
),
1922 ureg
->output
[i
].streams
,
1923 ureg
->output
[i
].usage_mask
, 0);
1928 for (i
= 0; i
< ureg
->nr_samplers
; i
++) {
1929 emit_decl_range( ureg
,
1931 ureg
->sampler
[i
].Index
, 1 );
1934 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
1935 emit_decl_sampler_view(ureg
,
1936 ureg
->sampler_view
[i
].index
,
1937 ureg
->sampler_view
[i
].target
,
1938 ureg
->sampler_view
[i
].return_type_x
,
1939 ureg
->sampler_view
[i
].return_type_y
,
1940 ureg
->sampler_view
[i
].return_type_z
,
1941 ureg
->sampler_view
[i
].return_type_w
);
1944 for (i
= 0; i
< ureg
->nr_images
; i
++) {
1945 emit_decl_image(ureg
,
1946 ureg
->image
[i
].index
,
1947 ureg
->image
[i
].target
,
1948 ureg
->image
[i
].format
,
1950 ureg
->image
[i
].raw
);
1953 for (i
= 0; i
< ureg
->nr_buffers
; i
++) {
1954 emit_decl_buffer(ureg
, ureg
->buffer
[i
].index
, ureg
->buffer
[i
].atomic
);
1957 for (i
= 0; i
< TGSI_MEMORY_TYPE_COUNT
; i
++) {
1958 if (ureg
->use_memory
[i
])
1959 emit_decl_memory(ureg
, i
);
1962 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
1963 struct const_decl
*decl
= &ureg
->const_decls
[i
];
1965 if (decl
->nr_constant_ranges
) {
1968 for (j
= 0; j
< decl
->nr_constant_ranges
; j
++) {
1969 emit_decl_range2D(ureg
,
1971 decl
->constant_range
[j
].first
,
1972 decl
->constant_range
[j
].last
,
1978 for (i
= 0; i
< PIPE_MAX_HW_ATOMIC_BUFFERS
; i
++) {
1979 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[i
];
1981 if (decl
->nr_hw_atomic_ranges
) {
1984 for (j
= 0; j
< decl
->nr_hw_atomic_ranges
; j
++) {
1985 emit_decl_atomic_2d(ureg
,
1986 decl
->hw_atomic_range
[j
].first
,
1987 decl
->hw_atomic_range
[j
].last
,
1989 decl
->hw_atomic_range
[j
].array_id
);
1994 if (ureg
->nr_temps
) {
1996 for (i
= 0; i
< ureg
->nr_temps
;) {
1997 boolean local
= util_bitmask_get(ureg
->local_temps
, i
);
1999 i
= util_bitmask_get_next_index(ureg
->decl_temps
, i
+ 1);
2000 if (i
== UTIL_BITMASK_INVALID_INDEX
)
2003 if (array
< ureg
->nr_array_temps
&& ureg
->array_temps
[array
] == first
)
2004 emit_decl_temps( ureg
, first
, i
- 1, local
, ++array
);
2006 emit_decl_temps( ureg
, first
, i
- 1, local
, 0 );
2010 if (ureg
->nr_addrs
) {
2011 emit_decl_range( ureg
,
2013 0, ureg
->nr_addrs
);
2016 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
2017 emit_immediate( ureg
,
2018 ureg
->immediate
[i
].value
.u
,
2019 ureg
->immediate
[i
].type
);
2023 /* Append the instruction tokens onto the declarations to build a
2024 * contiguous stream suitable to send to the driver.
2026 static void copy_instructions( struct ureg_program
*ureg
)
2028 unsigned nr_tokens
= ureg
->domain
[DOMAIN_INSN
].count
;
2029 union tgsi_any_token
*out
= get_tokens( ureg
,
2034 ureg
->domain
[DOMAIN_INSN
].tokens
,
2035 nr_tokens
* sizeof out
[0] );
2040 fixup_header_size(struct ureg_program
*ureg
)
2042 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_DECL
, 0 );
2044 out
->header
.BodySize
= ureg
->domain
[DOMAIN_DECL
].count
- 2;
2049 emit_header( struct ureg_program
*ureg
)
2051 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
2053 out
[0].header
.HeaderSize
= 2;
2054 out
[0].header
.BodySize
= 0;
2056 out
[1].processor
.Processor
= ureg
->processor
;
2057 out
[1].processor
.Padding
= 0;
2061 const struct tgsi_token
*ureg_finalize( struct ureg_program
*ureg
)
2063 const struct tgsi_token
*tokens
;
2065 switch (ureg
->processor
) {
2066 case PIPE_SHADER_VERTEX
:
2067 case PIPE_SHADER_TESS_EVAL
:
2068 ureg_property(ureg
, TGSI_PROPERTY_NEXT_SHADER
,
2069 ureg
->next_shader_processor
== -1 ?
2070 PIPE_SHADER_FRAGMENT
:
2071 ureg
->next_shader_processor
);
2075 emit_header( ureg
);
2077 copy_instructions( ureg
);
2078 fixup_header_size( ureg
);
2080 if (ureg
->domain
[0].tokens
== error_tokens
||
2081 ureg
->domain
[1].tokens
== error_tokens
) {
2082 debug_printf("%s: error in generated shader\n", __FUNCTION__
);
2087 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2090 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__
,
2091 ureg
->domain
[DOMAIN_DECL
].count
);
2092 tgsi_dump( tokens
, 0 );
2096 if (tokens
&& !tgsi_sanity_check(tokens
)) {
2097 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2098 tgsi_dump(tokens
, 0);
2108 void *ureg_create_shader( struct ureg_program
*ureg
,
2109 struct pipe_context
*pipe
,
2110 const struct pipe_stream_output_info
*so
)
2112 struct pipe_shader_state state
;
2114 pipe_shader_state_from_tgsi(&state
, ureg_finalize(ureg
));
2119 state
.stream_output
= *so
;
2121 switch (ureg
->processor
) {
2122 case PIPE_SHADER_VERTEX
:
2123 return pipe
->create_vs_state(pipe
, &state
);
2124 case PIPE_SHADER_TESS_CTRL
:
2125 return pipe
->create_tcs_state(pipe
, &state
);
2126 case PIPE_SHADER_TESS_EVAL
:
2127 return pipe
->create_tes_state(pipe
, &state
);
2128 case PIPE_SHADER_GEOMETRY
:
2129 return pipe
->create_gs_state(pipe
, &state
);
2130 case PIPE_SHADER_FRAGMENT
:
2131 return pipe
->create_fs_state(pipe
, &state
);
2138 const struct tgsi_token
*ureg_get_tokens( struct ureg_program
*ureg
,
2139 unsigned *nr_tokens
)
2141 const struct tgsi_token
*tokens
;
2143 ureg_finalize(ureg
);
2145 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2148 *nr_tokens
= ureg
->domain
[DOMAIN_DECL
].count
;
2150 ureg
->domain
[DOMAIN_DECL
].tokens
= 0;
2151 ureg
->domain
[DOMAIN_DECL
].size
= 0;
2152 ureg
->domain
[DOMAIN_DECL
].order
= 0;
2153 ureg
->domain
[DOMAIN_DECL
].count
= 0;
/* Release a token stream obtained from ureg_get_tokens(). */
void ureg_free_tokens( const struct tgsi_token *tokens )
{
   FREE((struct tgsi_token *)tokens);
}
2165 struct ureg_program
*
2166 ureg_create(unsigned processor
)
2168 return ureg_create_with_screen(processor
, NULL
);
2172 struct ureg_program
*
2173 ureg_create_with_screen(unsigned processor
, struct pipe_screen
*screen
)
2176 struct ureg_program
*ureg
= CALLOC_STRUCT( ureg_program
);
2180 ureg
->processor
= processor
;
2181 ureg
->supports_any_inout_decl_range
=
2183 screen
->get_shader_param(screen
, processor
,
2184 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE
) != 0;
2185 ureg
->next_shader_processor
= -1;
2187 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
2188 ureg
->properties
[i
] = ~0;
2190 ureg
->free_temps
= util_bitmask_create();
2191 if (ureg
->free_temps
== NULL
)
2194 ureg
->local_temps
= util_bitmask_create();
2195 if (ureg
->local_temps
== NULL
)
2196 goto no_local_temps
;
2198 ureg
->decl_temps
= util_bitmask_create();
2199 if (ureg
->decl_temps
== NULL
)
2205 util_bitmask_destroy(ureg
->local_temps
);
2207 util_bitmask_destroy(ureg
->free_temps
);
2216 ureg_set_next_shader_processor(struct ureg_program
*ureg
, unsigned processor
)
2218 ureg
->next_shader_processor
= processor
;
2223 ureg_get_nr_outputs( const struct ureg_program
*ureg
)
2227 return ureg
->nr_outputs
;
2231 void ureg_destroy( struct ureg_program
*ureg
)
2235 for (i
= 0; i
< ARRAY_SIZE(ureg
->domain
); i
++) {
2236 if (ureg
->domain
[i
].tokens
&&
2237 ureg
->domain
[i
].tokens
!= error_tokens
)
2238 FREE(ureg
->domain
[i
].tokens
);
2241 util_bitmask_destroy(ureg
->free_temps
);
2242 util_bitmask_destroy(ureg
->local_temps
);
2243 util_bitmask_destroy(ureg
->decl_temps
);