1 /**************************************************************************
3 * Copyright 2009-2010 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
43 union tgsi_any_token
{
44 struct tgsi_header header
;
45 struct tgsi_processor processor
;
46 struct tgsi_token token
;
47 struct tgsi_property prop
;
48 struct tgsi_property_data prop_data
;
49 struct tgsi_declaration decl
;
50 struct tgsi_declaration_range decl_range
;
51 struct tgsi_declaration_dimension decl_dim
;
52 struct tgsi_declaration_interp decl_interp
;
53 struct tgsi_declaration_image decl_image
;
54 struct tgsi_declaration_semantic decl_semantic
;
55 struct tgsi_declaration_sampler_view decl_sampler_view
;
56 struct tgsi_declaration_array array
;
57 struct tgsi_immediate imm
;
58 union tgsi_immediate_data imm_data
;
59 struct tgsi_instruction insn
;
60 struct tgsi_instruction_label insn_label
;
61 struct tgsi_instruction_texture insn_texture
;
62 struct tgsi_instruction_memory insn_memory
;
63 struct tgsi_texture_offset insn_texture_offset
;
64 struct tgsi_src_register src
;
65 struct tgsi_ind_register ind
;
66 struct tgsi_dimension dim
;
67 struct tgsi_dst_register dst
;
73 union tgsi_any_token
*tokens
;
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
92 } constant_range
[UREG_MAX_CONSTANT_RANGE
];
93 unsigned nr_constant_ranges
;
96 struct hw_atomic_decl
{
101 } hw_atomic_range
[UREG_MAX_HW_ATOMIC_RANGE
];
102 unsigned nr_hw_atomic_ranges
;
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
110 enum pipe_shader_type processor
;
111 bool supports_any_inout_decl_range
;
112 int next_shader_processor
;
115 enum tgsi_semantic semantic_name
;
116 unsigned semantic_index
;
117 enum tgsi_interpolate_mode interp
;
118 unsigned char cylindrical_wrap
;
119 unsigned char usage_mask
;
120 enum tgsi_interpolate_loc interp_location
;
124 } input
[UREG_MAX_INPUT
];
125 unsigned nr_inputs
, nr_input_regs
;
127 unsigned vs_inputs
[PIPE_MAX_ATTRIBS
/32];
130 enum tgsi_semantic semantic_name
;
131 unsigned semantic_index
;
132 } system_value
[UREG_MAX_SYSTEM_VALUE
];
133 unsigned nr_system_values
;
136 enum tgsi_semantic semantic_name
;
137 unsigned semantic_index
;
139 unsigned usage_mask
; /* = TGSI_WRITEMASK_* */
143 } output
[UREG_MAX_OUTPUT
];
144 unsigned nr_outputs
, nr_output_regs
;
154 } immediate
[UREG_MAX_IMMEDIATE
];
155 unsigned nr_immediates
;
157 struct ureg_src sampler
[PIPE_MAX_SAMPLERS
];
158 unsigned nr_samplers
;
162 enum tgsi_texture_type target
;
163 enum tgsi_return_type return_type_x
;
164 enum tgsi_return_type return_type_y
;
165 enum tgsi_return_type return_type_z
;
166 enum tgsi_return_type return_type_w
;
167 } sampler_view
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
168 unsigned nr_sampler_views
;
172 enum tgsi_texture_type target
;
173 enum pipe_format format
;
176 } image
[PIPE_MAX_SHADER_IMAGES
];
182 } buffer
[PIPE_MAX_SHADER_BUFFERS
];
185 struct util_bitmask
*free_temps
;
186 struct util_bitmask
*local_temps
;
187 struct util_bitmask
*decl_temps
;
190 unsigned array_temps
[UREG_MAX_ARRAY_TEMPS
];
191 unsigned nr_array_temps
;
193 struct const_decl const_decls
[PIPE_MAX_CONSTANT_BUFFERS
];
195 struct hw_atomic_decl hw_atomic_decls
[PIPE_MAX_HW_ATOMIC_BUFFERS
];
197 unsigned properties
[TGSI_PROPERTY_COUNT
];
200 unsigned nr_instructions
;
202 struct ureg_tokens domain
[2];
204 bool use_memory
[TGSI_MEMORY_TYPE_COUNT
];
207 static union tgsi_any_token error_tokens
[32];
209 static void tokens_error( struct ureg_tokens
*tokens
)
211 if (tokens
->tokens
&& tokens
->tokens
!= error_tokens
)
212 FREE(tokens
->tokens
);
214 tokens
->tokens
= error_tokens
;
215 tokens
->size
= ARRAY_SIZE(error_tokens
);
220 static void tokens_expand( struct ureg_tokens
*tokens
,
223 unsigned old_size
= tokens
->size
* sizeof(unsigned);
225 if (tokens
->tokens
== error_tokens
) {
229 while (tokens
->count
+ count
> tokens
->size
) {
230 tokens
->size
= (1 << ++tokens
->order
);
233 tokens
->tokens
= REALLOC(tokens
->tokens
,
235 tokens
->size
* sizeof(unsigned));
236 if (tokens
->tokens
== NULL
) {
237 tokens_error(tokens
);
241 static void set_bad( struct ureg_program
*ureg
)
243 tokens_error(&ureg
->domain
[0]);
248 static union tgsi_any_token
*get_tokens( struct ureg_program
*ureg
,
252 struct ureg_tokens
*tokens
= &ureg
->domain
[domain
];
253 union tgsi_any_token
*result
;
255 if (tokens
->count
+ count
> tokens
->size
)
256 tokens_expand(tokens
, count
);
258 result
= &tokens
->tokens
[tokens
->count
];
259 tokens
->count
+= count
;
264 static union tgsi_any_token
*retrieve_token( struct ureg_program
*ureg
,
268 if (ureg
->domain
[domain
].tokens
== error_tokens
)
269 return &error_tokens
[0];
271 return &ureg
->domain
[domain
].tokens
[nr
];
276 ureg_property(struct ureg_program
*ureg
, unsigned name
, unsigned value
)
278 assert(name
< ARRAY_SIZE(ureg
->properties
));
279 ureg
->properties
[name
] = value
;
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program
*ureg
,
284 enum tgsi_semantic semantic_name
,
285 unsigned semantic_index
,
286 enum tgsi_interpolate_mode interp_mode
,
287 unsigned cylindrical_wrap
,
288 enum tgsi_interpolate_loc interp_location
,
296 assert(usage_mask
!= 0);
297 assert(usage_mask
<= TGSI_WRITEMASK_XYZW
);
299 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
300 if (ureg
->input
[i
].semantic_name
== semantic_name
&&
301 ureg
->input
[i
].semantic_index
== semantic_index
) {
302 assert(ureg
->input
[i
].interp
== interp_mode
);
303 assert(ureg
->input
[i
].cylindrical_wrap
== cylindrical_wrap
);
304 assert(ureg
->input
[i
].interp_location
== interp_location
);
305 if (ureg
->input
[i
].array_id
== array_id
) {
306 ureg
->input
[i
].usage_mask
|= usage_mask
;
309 assert((ureg
->input
[i
].usage_mask
& usage_mask
) == 0);
313 if (ureg
->nr_inputs
< UREG_MAX_INPUT
) {
314 assert(array_size
>= 1);
315 ureg
->input
[i
].semantic_name
= semantic_name
;
316 ureg
->input
[i
].semantic_index
= semantic_index
;
317 ureg
->input
[i
].interp
= interp_mode
;
318 ureg
->input
[i
].cylindrical_wrap
= cylindrical_wrap
;
319 ureg
->input
[i
].interp_location
= interp_location
;
320 ureg
->input
[i
].first
= index
;
321 ureg
->input
[i
].last
= index
+ array_size
- 1;
322 ureg
->input
[i
].array_id
= array_id
;
323 ureg
->input
[i
].usage_mask
= usage_mask
;
324 ureg
->nr_input_regs
= MAX2(ureg
->nr_input_regs
, index
+ array_size
);
331 return ureg_src_array_register(TGSI_FILE_INPUT
, ureg
->input
[i
].first
,
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program
*ureg
,
337 enum tgsi_semantic semantic_name
,
338 unsigned semantic_index
,
339 enum tgsi_interpolate_mode interp_mode
,
340 unsigned cylindrical_wrap
,
341 enum tgsi_interpolate_loc interp_location
,
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
346 semantic_name
, semantic_index
, interp_mode
,
347 cylindrical_wrap
, interp_location
,
348 ureg
->nr_input_regs
, TGSI_WRITEMASK_XYZW
, array_id
, array_size
);
353 ureg_DECL_vs_input( struct ureg_program
*ureg
,
356 assert(ureg
->processor
== PIPE_SHADER_VERTEX
);
357 assert(index
/ 32 < ARRAY_SIZE(ureg
->vs_inputs
));
359 ureg
->vs_inputs
[index
/32] |= 1 << (index
% 32);
360 return ureg_src_register( TGSI_FILE_INPUT
, index
);
365 ureg_DECL_input_layout(struct ureg_program
*ureg
,
366 enum tgsi_semantic semantic_name
,
367 unsigned semantic_index
,
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
374 semantic_name
, semantic_index
,
375 TGSI_INTERPOLATE_CONSTANT
, 0, TGSI_INTERPOLATE_LOC_CENTER
,
376 index
, usage_mask
, array_id
, array_size
);
381 ureg_DECL_input(struct ureg_program
*ureg
,
382 enum tgsi_semantic semantic_name
,
383 unsigned semantic_index
,
387 return ureg_DECL_fs_input_cyl_centroid(ureg
, semantic_name
, semantic_index
,
388 TGSI_INTERPOLATE_CONSTANT
, 0,
389 TGSI_INTERPOLATE_LOC_CENTER
,
390 array_id
, array_size
);
395 ureg_DECL_system_value(struct ureg_program
*ureg
,
396 enum tgsi_semantic semantic_name
,
397 unsigned semantic_index
)
401 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
402 if (ureg
->system_value
[i
].semantic_name
== semantic_name
&&
403 ureg
->system_value
[i
].semantic_index
== semantic_index
) {
408 if (ureg
->nr_system_values
< UREG_MAX_SYSTEM_VALUE
) {
409 ureg
->system_value
[ureg
->nr_system_values
].semantic_name
= semantic_name
;
410 ureg
->system_value
[ureg
->nr_system_values
].semantic_index
= semantic_index
;
411 i
= ureg
->nr_system_values
;
412 ureg
->nr_system_values
++;
418 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE
, i
);
423 ureg_DECL_output_layout(struct ureg_program
*ureg
,
424 enum tgsi_semantic semantic_name
,
425 unsigned semantic_index
,
434 assert(usage_mask
!= 0);
435 assert(!(streams
& 0x03) || (usage_mask
& 1));
436 assert(!(streams
& 0x0c) || (usage_mask
& 2));
437 assert(!(streams
& 0x30) || (usage_mask
& 4));
438 assert(!(streams
& 0xc0) || (usage_mask
& 8));
440 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
441 if (ureg
->output
[i
].semantic_name
== semantic_name
&&
442 ureg
->output
[i
].semantic_index
== semantic_index
) {
443 if (ureg
->output
[i
].array_id
== array_id
) {
444 ureg
->output
[i
].usage_mask
|= usage_mask
;
447 assert((ureg
->output
[i
].usage_mask
& usage_mask
) == 0);
451 if (ureg
->nr_outputs
< UREG_MAX_OUTPUT
) {
452 ureg
->output
[i
].semantic_name
= semantic_name
;
453 ureg
->output
[i
].semantic_index
= semantic_index
;
454 ureg
->output
[i
].usage_mask
= usage_mask
;
455 ureg
->output
[i
].first
= index
;
456 ureg
->output
[i
].last
= index
+ array_size
- 1;
457 ureg
->output
[i
].array_id
= array_id
;
458 ureg
->nr_output_regs
= MAX2(ureg
->nr_output_regs
, index
+ array_size
);
467 ureg
->output
[i
].streams
|= streams
;
469 return ureg_dst_array_register(TGSI_FILE_OUTPUT
, ureg
->output
[i
].first
,
475 ureg_DECL_output_masked(struct ureg_program
*ureg
,
482 return ureg_DECL_output_layout(ureg
, name
, index
, 0,
483 ureg
->nr_output_regs
, usage_mask
, array_id
, array_size
);
488 ureg_DECL_output(struct ureg_program
*ureg
,
489 enum tgsi_semantic name
,
492 return ureg_DECL_output_masked(ureg
, name
, index
, TGSI_WRITEMASK_XYZW
,
497 ureg_DECL_output_array(struct ureg_program
*ureg
,
498 enum tgsi_semantic semantic_name
,
499 unsigned semantic_index
,
503 return ureg_DECL_output_masked(ureg
, semantic_name
, semantic_index
,
505 array_id
, array_size
);
509 /* Returns a new constant register. Keep track of which have been
510 * referred to so that we can emit decls later.
512 * Constant operands declared with this function must be addressed
513 * with a two-dimensional index.
515 * There is nothing in this code to bind this constant to any tracked
516 * value or manage any constant_buffer contents -- that's the
517 * resposibility of the calling code.
520 ureg_DECL_constant2D(struct ureg_program
*ureg
,
525 struct const_decl
*decl
= &ureg
->const_decls
[index2D
];
527 assert(index2D
< PIPE_MAX_CONSTANT_BUFFERS
);
529 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
530 uint i
= decl
->nr_constant_ranges
++;
532 decl
->constant_range
[i
].first
= first
;
533 decl
->constant_range
[i
].last
= last
;
538 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
540 * Constant operands declared with this function must be addressed
541 * with a one-dimensional index.
544 ureg_DECL_constant(struct ureg_program
*ureg
,
547 struct const_decl
*decl
= &ureg
->const_decls
[0];
548 unsigned minconst
= index
, maxconst
= index
;
551 /* Inside existing range?
553 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
554 if (decl
->constant_range
[i
].first
<= index
&&
555 decl
->constant_range
[i
].last
>= index
) {
560 /* Extend existing range?
562 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
563 if (decl
->constant_range
[i
].last
== index
- 1) {
564 decl
->constant_range
[i
].last
= index
;
568 if (decl
->constant_range
[i
].first
== index
+ 1) {
569 decl
->constant_range
[i
].first
= index
;
573 minconst
= MIN2(minconst
, decl
->constant_range
[i
].first
);
574 maxconst
= MAX2(maxconst
, decl
->constant_range
[i
].last
);
579 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
580 i
= decl
->nr_constant_ranges
++;
581 decl
->constant_range
[i
].first
= index
;
582 decl
->constant_range
[i
].last
= index
;
586 /* Collapse all ranges down to one:
589 decl
->constant_range
[0].first
= minconst
;
590 decl
->constant_range
[0].last
= maxconst
;
591 decl
->nr_constant_ranges
= 1;
594 assert(i
< decl
->nr_constant_ranges
);
595 assert(decl
->constant_range
[i
].first
<= index
);
596 assert(decl
->constant_range
[i
].last
>= index
);
598 struct ureg_src src
= ureg_src_register(TGSI_FILE_CONSTANT
, index
);
599 return ureg_src_dimension(src
, 0);
603 /* Returns a new hw atomic register. Keep track of which have been
604 * referred to so that we can emit decls later.
607 ureg_DECL_hw_atomic(struct ureg_program
*ureg
,
613 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[buffer_id
];
615 if (decl
->nr_hw_atomic_ranges
< UREG_MAX_HW_ATOMIC_RANGE
) {
616 uint i
= decl
->nr_hw_atomic_ranges
++;
618 decl
->hw_atomic_range
[i
].first
= first
;
619 decl
->hw_atomic_range
[i
].last
= last
;
620 decl
->hw_atomic_range
[i
].array_id
= array_id
;
626 static struct ureg_dst
alloc_temporary( struct ureg_program
*ureg
,
631 /* Look for a released temporary.
633 for (i
= util_bitmask_get_first_index(ureg
->free_temps
);
634 i
!= UTIL_BITMASK_INVALID_INDEX
;
635 i
= util_bitmask_get_next_index(ureg
->free_temps
, i
+ 1)) {
636 if (util_bitmask_get(ureg
->local_temps
, i
) == local
)
640 /* Or allocate a new one.
642 if (i
== UTIL_BITMASK_INVALID_INDEX
) {
643 i
= ureg
->nr_temps
++;
646 util_bitmask_set(ureg
->local_temps
, i
);
648 /* Start a new declaration when the local flag changes */
649 if (!i
|| util_bitmask_get(ureg
->local_temps
, i
- 1) != local
)
650 util_bitmask_set(ureg
->decl_temps
, i
);
653 util_bitmask_clear(ureg
->free_temps
, i
);
655 return ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
658 struct ureg_dst
ureg_DECL_temporary( struct ureg_program
*ureg
)
660 return alloc_temporary(ureg
, FALSE
);
663 struct ureg_dst
ureg_DECL_local_temporary( struct ureg_program
*ureg
)
665 return alloc_temporary(ureg
, TRUE
);
668 struct ureg_dst
ureg_DECL_array_temporary( struct ureg_program
*ureg
,
672 unsigned i
= ureg
->nr_temps
;
673 struct ureg_dst dst
= ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
676 util_bitmask_set(ureg
->local_temps
, i
);
678 /* Always start a new declaration at the start */
679 util_bitmask_set(ureg
->decl_temps
, i
);
681 ureg
->nr_temps
+= size
;
683 /* and also at the end of the array */
684 util_bitmask_set(ureg
->decl_temps
, ureg
->nr_temps
);
686 if (ureg
->nr_array_temps
< UREG_MAX_ARRAY_TEMPS
) {
687 ureg
->array_temps
[ureg
->nr_array_temps
++] = i
;
688 dst
.ArrayID
= ureg
->nr_array_temps
;
694 void ureg_release_temporary( struct ureg_program
*ureg
,
695 struct ureg_dst tmp
)
697 if(tmp
.File
== TGSI_FILE_TEMPORARY
)
698 util_bitmask_set(ureg
->free_temps
, tmp
.Index
);
702 /* Allocate a new address register.
704 struct ureg_dst
ureg_DECL_address( struct ureg_program
*ureg
)
706 if (ureg
->nr_addrs
< UREG_MAX_ADDR
)
707 return ureg_dst_register( TGSI_FILE_ADDRESS
, ureg
->nr_addrs
++ );
710 return ureg_dst_register( TGSI_FILE_ADDRESS
, 0 );
713 /* Allocate a new sampler.
715 struct ureg_src
ureg_DECL_sampler( struct ureg_program
*ureg
,
720 for (i
= 0; i
< ureg
->nr_samplers
; i
++)
721 if (ureg
->sampler
[i
].Index
== (int)nr
)
722 return ureg
->sampler
[i
];
724 if (i
< PIPE_MAX_SAMPLERS
) {
725 ureg
->sampler
[i
] = ureg_src_register( TGSI_FILE_SAMPLER
, nr
);
727 return ureg
->sampler
[i
];
731 return ureg
->sampler
[0];
735 * Allocate a new shader sampler view.
738 ureg_DECL_sampler_view(struct ureg_program
*ureg
,
740 enum tgsi_texture_type target
,
741 enum tgsi_return_type return_type_x
,
742 enum tgsi_return_type return_type_y
,
743 enum tgsi_return_type return_type_z
,
744 enum tgsi_return_type return_type_w
)
746 struct ureg_src reg
= ureg_src_register(TGSI_FILE_SAMPLER_VIEW
, index
);
749 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
750 if (ureg
->sampler_view
[i
].index
== index
) {
755 if (i
< PIPE_MAX_SHADER_SAMPLER_VIEWS
) {
756 ureg
->sampler_view
[i
].index
= index
;
757 ureg
->sampler_view
[i
].target
= target
;
758 ureg
->sampler_view
[i
].return_type_x
= return_type_x
;
759 ureg
->sampler_view
[i
].return_type_y
= return_type_y
;
760 ureg
->sampler_view
[i
].return_type_z
= return_type_z
;
761 ureg
->sampler_view
[i
].return_type_w
= return_type_w
;
762 ureg
->nr_sampler_views
++;
770 /* Allocate a new image.
773 ureg_DECL_image(struct ureg_program
*ureg
,
775 enum tgsi_texture_type target
,
776 enum pipe_format format
,
780 struct ureg_src reg
= ureg_src_register(TGSI_FILE_IMAGE
, index
);
783 for (i
= 0; i
< ureg
->nr_images
; i
++)
784 if (ureg
->image
[i
].index
== index
)
787 if (i
< PIPE_MAX_SHADER_IMAGES
) {
788 ureg
->image
[i
].index
= index
;
789 ureg
->image
[i
].target
= target
;
790 ureg
->image
[i
].wr
= wr
;
791 ureg
->image
[i
].raw
= raw
;
792 ureg
->image
[i
].format
= format
;
801 /* Allocate a new buffer.
803 struct ureg_src
ureg_DECL_buffer(struct ureg_program
*ureg
, unsigned nr
,
806 struct ureg_src reg
= ureg_src_register(TGSI_FILE_BUFFER
, nr
);
809 for (i
= 0; i
< ureg
->nr_buffers
; i
++)
810 if (ureg
->buffer
[i
].index
== nr
)
813 if (i
< PIPE_MAX_SHADER_BUFFERS
) {
814 ureg
->buffer
[i
].index
= nr
;
815 ureg
->buffer
[i
].atomic
= atomic
;
824 /* Allocate a memory area.
826 struct ureg_src
ureg_DECL_memory(struct ureg_program
*ureg
,
827 unsigned memory_type
)
829 struct ureg_src reg
= ureg_src_register(TGSI_FILE_MEMORY
, memory_type
);
831 ureg
->use_memory
[memory_type
] = true;
836 match_or_expand_immediate64( const unsigned *v
,
843 unsigned nr2
= *pnr2
;
847 for (i
= 0; i
< nr
; i
+= 2) {
848 boolean found
= FALSE
;
850 for (j
= 0; j
< nr2
&& !found
; j
+= 2) {
851 if (v
[i
] == v2
[j
] && v
[i
+ 1] == v2
[j
+ 1]) {
852 *swizzle
|= (j
<< (i
* 2)) | ((j
+ 1) << ((i
+ 1) * 2));
862 v2
[nr2
+ 1] = v
[i
+ 1];
864 *swizzle
|= (nr2
<< (i
* 2)) | ((nr2
+ 1) << ((i
+ 1) * 2));
869 /* Actually expand immediate only when fully succeeded.
876 match_or_expand_immediate( const unsigned *v
,
883 unsigned nr2
= *pnr2
;
886 if (type
== TGSI_IMM_FLOAT64
||
887 type
== TGSI_IMM_UINT64
||
888 type
== TGSI_IMM_INT64
)
889 return match_or_expand_immediate64(v
, type
, nr
, v2
, pnr2
, swizzle
);
893 for (i
= 0; i
< nr
; i
++) {
894 boolean found
= FALSE
;
896 for (j
= 0; j
< nr2
&& !found
; j
++) {
898 *swizzle
|= j
<< (i
* 2);
909 *swizzle
|= nr2
<< (i
* 2);
914 /* Actually expand immediate only when fully succeeded.
921 static struct ureg_src
922 decl_immediate( struct ureg_program
*ureg
,
928 unsigned swizzle
= 0;
930 /* Could do a first pass where we examine all existing immediates
934 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
935 if (ureg
->immediate
[i
].type
!= type
) {
938 if (match_or_expand_immediate(v
,
941 ureg
->immediate
[i
].value
.u
,
942 &ureg
->immediate
[i
].nr
,
948 if (ureg
->nr_immediates
< UREG_MAX_IMMEDIATE
) {
949 i
= ureg
->nr_immediates
++;
950 ureg
->immediate
[i
].type
= type
;
951 if (match_or_expand_immediate(v
,
954 ureg
->immediate
[i
].value
.u
,
955 &ureg
->immediate
[i
].nr
,
964 /* Make sure that all referenced elements are from this immediate.
965 * Has the effect of making size-one immediates into scalars.
967 if (type
== TGSI_IMM_FLOAT64
||
968 type
== TGSI_IMM_UINT64
||
969 type
== TGSI_IMM_INT64
) {
970 for (j
= nr
; j
< 4; j
+=2) {
971 swizzle
|= (swizzle
& 0xf) << (j
* 2);
974 for (j
= nr
; j
< 4; j
++) {
975 swizzle
|= (swizzle
& 0x3) << (j
* 2);
978 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE
, i
),
979 (swizzle
>> 0) & 0x3,
980 (swizzle
>> 2) & 0x3,
981 (swizzle
>> 4) & 0x3,
982 (swizzle
>> 6) & 0x3);
987 ureg_DECL_immediate( struct ureg_program
*ureg
,
997 for (i
= 0; i
< nr
; i
++) {
1001 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT32
);
1005 ureg_DECL_immediate_f64( struct ureg_program
*ureg
,
1015 assert((nr
/ 2) < 3);
1016 for (i
= 0; i
< nr
/ 2; i
++) {
1020 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT64
);
1024 ureg_DECL_immediate_uint( struct ureg_program
*ureg
,
1028 return decl_immediate(ureg
, v
, nr
, TGSI_IMM_UINT32
);
1033 ureg_DECL_immediate_block_uint( struct ureg_program
*ureg
,
1040 if (ureg
->nr_immediates
+ (nr
+ 3) / 4 > UREG_MAX_IMMEDIATE
) {
1042 return ureg_src_register(TGSI_FILE_IMMEDIATE
, 0);
1045 index
= ureg
->nr_immediates
;
1046 ureg
->nr_immediates
+= (nr
+ 3) / 4;
1048 for (i
= index
; i
< ureg
->nr_immediates
; i
++) {
1049 ureg
->immediate
[i
].type
= TGSI_IMM_UINT32
;
1050 ureg
->immediate
[i
].nr
= nr
> 4 ? 4 : nr
;
1051 memcpy(ureg
->immediate
[i
].value
.u
,
1052 &v
[(i
- index
) * 4],
1053 ureg
->immediate
[i
].nr
* sizeof(uint
));
1057 return ureg_src_register(TGSI_FILE_IMMEDIATE
, index
);
1062 ureg_DECL_immediate_int( struct ureg_program
*ureg
,
1066 return decl_immediate(ureg
, (const unsigned *)v
, nr
, TGSI_IMM_INT32
);
1070 ureg_DECL_immediate_uint64( struct ureg_program
*ureg
,
1080 assert((nr
/ 2) < 3);
1081 for (i
= 0; i
< nr
/ 2; i
++) {
1085 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_UINT64
);
1089 ureg_DECL_immediate_int64( struct ureg_program
*ureg
,
1099 assert((nr
/ 2) < 3);
1100 for (i
= 0; i
< nr
/ 2; i
++) {
1104 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_INT64
);
1108 ureg_emit_src( struct ureg_program
*ureg
,
1109 struct ureg_src src
)
1111 unsigned size
= 1 + (src
.Indirect
? 1 : 0) +
1112 (src
.Dimension
? (src
.DimIndirect
? 2 : 1) : 0);
1114 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1117 assert(src
.File
!= TGSI_FILE_NULL
);
1118 assert(src
.File
< TGSI_FILE_COUNT
);
1121 out
[n
].src
.File
= src
.File
;
1122 out
[n
].src
.SwizzleX
= src
.SwizzleX
;
1123 out
[n
].src
.SwizzleY
= src
.SwizzleY
;
1124 out
[n
].src
.SwizzleZ
= src
.SwizzleZ
;
1125 out
[n
].src
.SwizzleW
= src
.SwizzleW
;
1126 out
[n
].src
.Index
= src
.Index
;
1127 out
[n
].src
.Negate
= src
.Negate
;
1128 out
[0].src
.Absolute
= src
.Absolute
;
1132 out
[0].src
.Indirect
= 1;
1134 out
[n
].ind
.File
= src
.IndirectFile
;
1135 out
[n
].ind
.Swizzle
= src
.IndirectSwizzle
;
1136 out
[n
].ind
.Index
= src
.IndirectIndex
;
1137 if (!ureg
->supports_any_inout_decl_range
&&
1138 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1139 out
[n
].ind
.ArrayID
= 0;
1141 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1145 if (src
.Dimension
) {
1146 out
[0].src
.Dimension
= 1;
1147 out
[n
].dim
.Dimension
= 0;
1148 out
[n
].dim
.Padding
= 0;
1149 if (src
.DimIndirect
) {
1150 out
[n
].dim
.Indirect
= 1;
1151 out
[n
].dim
.Index
= src
.DimensionIndex
;
1154 out
[n
].ind
.File
= src
.DimIndFile
;
1155 out
[n
].ind
.Swizzle
= src
.DimIndSwizzle
;
1156 out
[n
].ind
.Index
= src
.DimIndIndex
;
1157 if (!ureg
->supports_any_inout_decl_range
&&
1158 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1159 out
[n
].ind
.ArrayID
= 0;
1161 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1163 out
[n
].dim
.Indirect
= 0;
1164 out
[n
].dim
.Index
= src
.DimensionIndex
;
1174 ureg_emit_dst( struct ureg_program
*ureg
,
1175 struct ureg_dst dst
)
1177 unsigned size
= 1 + (dst
.Indirect
? 1 : 0) +
1178 (dst
.Dimension
? (dst
.DimIndirect
? 2 : 1) : 0);
1180 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1183 assert(dst
.File
!= TGSI_FILE_NULL
);
1184 assert(dst
.File
!= TGSI_FILE_SAMPLER
);
1185 assert(dst
.File
!= TGSI_FILE_SAMPLER_VIEW
);
1186 assert(dst
.File
!= TGSI_FILE_IMMEDIATE
);
1187 assert(dst
.File
< TGSI_FILE_COUNT
);
1190 out
[n
].dst
.File
= dst
.File
;
1191 out
[n
].dst
.WriteMask
= dst
.WriteMask
;
1192 out
[n
].dst
.Indirect
= dst
.Indirect
;
1193 out
[n
].dst
.Index
= dst
.Index
;
1198 out
[n
].ind
.File
= dst
.IndirectFile
;
1199 out
[n
].ind
.Swizzle
= dst
.IndirectSwizzle
;
1200 out
[n
].ind
.Index
= dst
.IndirectIndex
;
1201 if (!ureg
->supports_any_inout_decl_range
&&
1202 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1203 out
[n
].ind
.ArrayID
= 0;
1205 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1209 if (dst
.Dimension
) {
1210 out
[0].dst
.Dimension
= 1;
1211 out
[n
].dim
.Dimension
= 0;
1212 out
[n
].dim
.Padding
= 0;
1213 if (dst
.DimIndirect
) {
1214 out
[n
].dim
.Indirect
= 1;
1215 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1218 out
[n
].ind
.File
= dst
.DimIndFile
;
1219 out
[n
].ind
.Swizzle
= dst
.DimIndSwizzle
;
1220 out
[n
].ind
.Index
= dst
.DimIndIndex
;
1221 if (!ureg
->supports_any_inout_decl_range
&&
1222 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1223 out
[n
].ind
.ArrayID
= 0;
1225 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1227 out
[n
].dim
.Indirect
= 0;
1228 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1237 static void validate( enum tgsi_opcode opcode
,
1242 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info( opcode
);
1245 assert(nr_dst
== info
->num_dst
);
1246 assert(nr_src
== info
->num_src
);
1251 struct ureg_emit_insn_result
1252 ureg_emit_insn(struct ureg_program
*ureg
,
1253 enum tgsi_opcode opcode
,
1259 union tgsi_any_token
*out
;
1261 struct ureg_emit_insn_result result
;
1263 validate( opcode
, num_dst
, num_src
);
1265 out
= get_tokens( ureg
, DOMAIN_INSN
, count
);
1266 out
[0].insn
= tgsi_default_instruction();
1267 out
[0].insn
.Opcode
= opcode
;
1268 out
[0].insn
.Saturate
= saturate
;
1269 out
[0].insn
.Precise
= precise
;
1270 out
[0].insn
.NumDstRegs
= num_dst
;
1271 out
[0].insn
.NumSrcRegs
= num_src
;
1273 result
.insn_token
= ureg
->domain
[DOMAIN_INSN
].count
- count
;
1274 result
.extended_token
= result
.insn_token
;
1276 ureg
->nr_instructions
++;
1283 * Emit a label token.
1284 * \param label_token returns a token number indicating where the label
1285 * needs to be patched later. Later, this value should be passed to the
1286 * ureg_fixup_label() function.
1289 ureg_emit_label(struct ureg_program
*ureg
,
1290 unsigned extended_token
,
1291 unsigned *label_token
)
1293 union tgsi_any_token
*out
, *insn
;
1298 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1301 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1302 insn
->insn
.Label
= 1;
1304 *label_token
= ureg
->domain
[DOMAIN_INSN
].count
- 1;
1307 /* Will return a number which can be used in a label to point to the
1308 * next instruction to be emitted.
1311 ureg_get_instruction_number( struct ureg_program
*ureg
)
1313 return ureg
->nr_instructions
;
1316 /* Patch a given label (expressed as a token number) to point to a
1317 * given instruction (expressed as an instruction number).
1320 ureg_fixup_label(struct ureg_program
*ureg
,
1321 unsigned label_token
,
1322 unsigned instruction_number
)
1324 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, label_token
);
1326 out
->insn_label
.Label
= instruction_number
;
1331 ureg_emit_texture(struct ureg_program
*ureg
,
1332 unsigned extended_token
,
1333 enum tgsi_texture_type target
,
1334 enum tgsi_return_type return_type
, unsigned num_offsets
)
1336 union tgsi_any_token
*out
, *insn
;
1338 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1339 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1341 insn
->insn
.Texture
= 1;
1344 out
[0].insn_texture
.Texture
= target
;
1345 out
[0].insn_texture
.NumOffsets
= num_offsets
;
1346 out
[0].insn_texture
.ReturnType
= return_type
;
1350 ureg_emit_texture_offset(struct ureg_program
*ureg
,
1351 const struct tgsi_texture_offset
*offset
)
1353 union tgsi_any_token
*out
;
1355 out
= get_tokens( ureg
, DOMAIN_INSN
, 1);
1358 out
[0].insn_texture_offset
= *offset
;
1363 ureg_emit_memory(struct ureg_program
*ureg
,
1364 unsigned extended_token
,
1366 enum tgsi_texture_type texture
,
1367 enum pipe_format format
)
1369 union tgsi_any_token
*out
, *insn
;
1371 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1372 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1374 insn
->insn
.Memory
= 1;
1377 out
[0].insn_memory
.Qualifier
= qualifier
;
1378 out
[0].insn_memory
.Texture
= texture
;
1379 out
[0].insn_memory
.Format
= format
;
1383 ureg_fixup_insn_size(struct ureg_program
*ureg
,
1386 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, insn
);
1388 assert(out
->insn
.Type
== TGSI_TOKEN_TYPE_INSTRUCTION
);
1389 out
->insn
.NrTokens
= ureg
->domain
[DOMAIN_INSN
].count
- insn
- 1;
1394 ureg_insn(struct ureg_program
*ureg
,
1395 enum tgsi_opcode opcode
,
1396 const struct ureg_dst
*dst
,
1398 const struct ureg_src
*src
,
1402 struct ureg_emit_insn_result insn
;
1406 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1410 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1412 insn
= ureg_emit_insn(ureg
,
1419 for (i
= 0; i
< nr_dst
; i
++)
1420 ureg_emit_dst( ureg
, dst
[i
] );
1422 for (i
= 0; i
< nr_src
; i
++)
1423 ureg_emit_src( ureg
, src
[i
] );
1425 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1429 ureg_tex_insn(struct ureg_program
*ureg
,
1430 enum tgsi_opcode opcode
,
1431 const struct ureg_dst
*dst
,
1433 enum tgsi_texture_type target
,
1434 enum tgsi_return_type return_type
,
1435 const struct tgsi_texture_offset
*texoffsets
,
1437 const struct ureg_src
*src
,
1440 struct ureg_emit_insn_result insn
;
1444 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1448 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1450 insn
= ureg_emit_insn(ureg
,
1457 ureg_emit_texture( ureg
, insn
.extended_token
, target
, return_type
,
1460 for (i
= 0; i
< nr_offset
; i
++)
1461 ureg_emit_texture_offset( ureg
, &texoffsets
[i
]);
1463 for (i
= 0; i
< nr_dst
; i
++)
1464 ureg_emit_dst( ureg
, dst
[i
] );
1466 for (i
= 0; i
< nr_src
; i
++)
1467 ureg_emit_src( ureg
, src
[i
] );
1469 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1474 ureg_memory_insn(struct ureg_program
*ureg
,
1475 enum tgsi_opcode opcode
,
1476 const struct ureg_dst
*dst
,
1478 const struct ureg_src
*src
,
1481 enum tgsi_texture_type texture
,
1482 enum pipe_format format
)
1484 struct ureg_emit_insn_result insn
;
1487 insn
= ureg_emit_insn(ureg
,
1494 ureg_emit_memory(ureg
, insn
.extended_token
, qualifier
, texture
, format
);
1496 for (i
= 0; i
< nr_dst
; i
++)
1497 ureg_emit_dst(ureg
, dst
[i
]);
1499 for (i
= 0; i
< nr_src
; i
++)
1500 ureg_emit_src(ureg
, src
[i
]);
1502 ureg_fixup_insn_size(ureg
, insn
.insn_token
);
1507 emit_decl_semantic(struct ureg_program
*ureg
,
1511 enum tgsi_semantic semantic_name
,
1512 unsigned semantic_index
,
1514 unsigned usage_mask
,
1517 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1520 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1521 out
[0].decl
.NrTokens
= 3;
1522 out
[0].decl
.File
= file
;
1523 out
[0].decl
.UsageMask
= usage_mask
;
1524 out
[0].decl
.Semantic
= 1;
1525 out
[0].decl
.Array
= array_id
!= 0;
1528 out
[1].decl_range
.First
= first
;
1529 out
[1].decl_range
.Last
= last
;
1532 out
[2].decl_semantic
.Name
= semantic_name
;
1533 out
[2].decl_semantic
.Index
= semantic_index
;
1534 out
[2].decl_semantic
.StreamX
= streams
& 3;
1535 out
[2].decl_semantic
.StreamY
= (streams
>> 2) & 3;
1536 out
[2].decl_semantic
.StreamZ
= (streams
>> 4) & 3;
1537 out
[2].decl_semantic
.StreamW
= (streams
>> 6) & 3;
1541 out
[3].array
.ArrayID
= array_id
;
1546 emit_decl_atomic_2d(struct ureg_program
*ureg
,
1552 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1555 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1556 out
[0].decl
.NrTokens
= 3;
1557 out
[0].decl
.File
= TGSI_FILE_HW_ATOMIC
;
1558 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1559 out
[0].decl
.Dimension
= 1;
1560 out
[0].decl
.Array
= array_id
!= 0;
1563 out
[1].decl_range
.First
= first
;
1564 out
[1].decl_range
.Last
= last
;
1567 out
[2].decl_dim
.Index2D
= index2D
;
1571 out
[3].array
.ArrayID
= array_id
;
1576 emit_decl_fs(struct ureg_program
*ureg
,
1580 enum tgsi_semantic semantic_name
,
1581 unsigned semantic_index
,
1582 enum tgsi_interpolate_mode interpolate
,
1583 unsigned cylindrical_wrap
,
1584 enum tgsi_interpolate_loc interpolate_location
,
1586 unsigned usage_mask
)
1588 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
,
1592 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1593 out
[0].decl
.NrTokens
= 4;
1594 out
[0].decl
.File
= file
;
1595 out
[0].decl
.UsageMask
= usage_mask
;
1596 out
[0].decl
.Interpolate
= 1;
1597 out
[0].decl
.Semantic
= 1;
1598 out
[0].decl
.Array
= array_id
!= 0;
1601 out
[1].decl_range
.First
= first
;
1602 out
[1].decl_range
.Last
= last
;
1605 out
[2].decl_interp
.Interpolate
= interpolate
;
1606 out
[2].decl_interp
.CylindricalWrap
= cylindrical_wrap
;
1607 out
[2].decl_interp
.Location
= interpolate_location
;
1610 out
[3].decl_semantic
.Name
= semantic_name
;
1611 out
[3].decl_semantic
.Index
= semantic_index
;
1615 out
[4].array
.ArrayID
= array_id
;
1620 emit_decl_temps( struct ureg_program
*ureg
,
1621 unsigned first
, unsigned last
,
1625 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
,
1629 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1630 out
[0].decl
.NrTokens
= 2;
1631 out
[0].decl
.File
= TGSI_FILE_TEMPORARY
;
1632 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1633 out
[0].decl
.Local
= local
;
1636 out
[1].decl_range
.First
= first
;
1637 out
[1].decl_range
.Last
= last
;
1640 out
[0].decl
.Array
= 1;
1642 out
[2].array
.ArrayID
= arrayid
;
1646 static void emit_decl_range( struct ureg_program
*ureg
,
1651 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
1654 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1655 out
[0].decl
.NrTokens
= 2;
1656 out
[0].decl
.File
= file
;
1657 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1658 out
[0].decl
.Semantic
= 0;
1661 out
[1].decl_range
.First
= first
;
1662 out
[1].decl_range
.Last
= first
+ count
- 1;
1666 emit_decl_range2D(struct ureg_program
*ureg
,
1672 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1675 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1676 out
[0].decl
.NrTokens
= 3;
1677 out
[0].decl
.File
= file
;
1678 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1679 out
[0].decl
.Dimension
= 1;
1682 out
[1].decl_range
.First
= first
;
1683 out
[1].decl_range
.Last
= last
;
1686 out
[2].decl_dim
.Index2D
= index2D
;
1690 emit_decl_sampler_view(struct ureg_program
*ureg
,
1692 enum tgsi_texture_type target
,
1693 enum tgsi_return_type return_type_x
,
1694 enum tgsi_return_type return_type_y
,
1695 enum tgsi_return_type return_type_z
,
1696 enum tgsi_return_type return_type_w
)
1698 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1701 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1702 out
[0].decl
.NrTokens
= 3;
1703 out
[0].decl
.File
= TGSI_FILE_SAMPLER_VIEW
;
1704 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1707 out
[1].decl_range
.First
= index
;
1708 out
[1].decl_range
.Last
= index
;
1711 out
[2].decl_sampler_view
.Resource
= target
;
1712 out
[2].decl_sampler_view
.ReturnTypeX
= return_type_x
;
1713 out
[2].decl_sampler_view
.ReturnTypeY
= return_type_y
;
1714 out
[2].decl_sampler_view
.ReturnTypeZ
= return_type_z
;
1715 out
[2].decl_sampler_view
.ReturnTypeW
= return_type_w
;
1719 emit_decl_image(struct ureg_program
*ureg
,
1721 enum tgsi_texture_type target
,
1722 enum pipe_format format
,
1726 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1729 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1730 out
[0].decl
.NrTokens
= 3;
1731 out
[0].decl
.File
= TGSI_FILE_IMAGE
;
1732 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1735 out
[1].decl_range
.First
= index
;
1736 out
[1].decl_range
.Last
= index
;
1739 out
[2].decl_image
.Resource
= target
;
1740 out
[2].decl_image
.Writable
= wr
;
1741 out
[2].decl_image
.Raw
= raw
;
1742 out
[2].decl_image
.Format
= format
;
1746 emit_decl_buffer(struct ureg_program
*ureg
,
1750 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1753 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1754 out
[0].decl
.NrTokens
= 2;
1755 out
[0].decl
.File
= TGSI_FILE_BUFFER
;
1756 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1757 out
[0].decl
.Atomic
= atomic
;
1760 out
[1].decl_range
.First
= index
;
1761 out
[1].decl_range
.Last
= index
;
1765 emit_decl_memory(struct ureg_program
*ureg
, unsigned memory_type
)
1767 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1770 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1771 out
[0].decl
.NrTokens
= 2;
1772 out
[0].decl
.File
= TGSI_FILE_MEMORY
;
1773 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1774 out
[0].decl
.MemType
= memory_type
;
1777 out
[1].decl_range
.First
= memory_type
;
1778 out
[1].decl_range
.Last
= memory_type
;
1782 emit_immediate( struct ureg_program
*ureg
,
1786 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 5 );
1789 out
[0].imm
.Type
= TGSI_TOKEN_TYPE_IMMEDIATE
;
1790 out
[0].imm
.NrTokens
= 5;
1791 out
[0].imm
.DataType
= type
;
1792 out
[0].imm
.Padding
= 0;
1794 out
[1].imm_data
.Uint
= v
[0];
1795 out
[2].imm_data
.Uint
= v
[1];
1796 out
[3].imm_data
.Uint
= v
[2];
1797 out
[4].imm_data
.Uint
= v
[3];
1801 emit_property(struct ureg_program
*ureg
,
1805 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1808 out
[0].prop
.Type
= TGSI_TOKEN_TYPE_PROPERTY
;
1809 out
[0].prop
.NrTokens
= 2;
1810 out
[0].prop
.PropertyName
= name
;
1812 out
[1].prop_data
.Data
= data
;
1816 static void emit_decls( struct ureg_program
*ureg
)
1820 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
1821 if (ureg
->properties
[i
] != ~0u)
1822 emit_property(ureg
, i
, ureg
->properties
[i
]);
1824 if (ureg
->processor
== PIPE_SHADER_VERTEX
) {
1825 for (i
= 0; i
< PIPE_MAX_ATTRIBS
; i
++) {
1826 if (ureg
->vs_inputs
[i
/32] & (1u << (i
%32))) {
1827 emit_decl_range( ureg
, TGSI_FILE_INPUT
, i
, 1 );
1830 } else if (ureg
->processor
== PIPE_SHADER_FRAGMENT
) {
1831 if (ureg
->supports_any_inout_decl_range
) {
1832 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1835 ureg
->input
[i
].first
,
1836 ureg
->input
[i
].last
,
1837 ureg
->input
[i
].semantic_name
,
1838 ureg
->input
[i
].semantic_index
,
1839 ureg
->input
[i
].interp
,
1840 ureg
->input
[i
].cylindrical_wrap
,
1841 ureg
->input
[i
].interp_location
,
1842 ureg
->input
[i
].array_id
,
1843 ureg
->input
[i
].usage_mask
);
1847 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1848 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1852 ureg
->input
[i
].semantic_name
,
1853 ureg
->input
[i
].semantic_index
+
1854 (j
- ureg
->input
[i
].first
),
1855 ureg
->input
[i
].interp
,
1856 ureg
->input
[i
].cylindrical_wrap
,
1857 ureg
->input
[i
].interp_location
, 0,
1858 ureg
->input
[i
].usage_mask
);
1863 if (ureg
->supports_any_inout_decl_range
) {
1864 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1865 emit_decl_semantic(ureg
,
1867 ureg
->input
[i
].first
,
1868 ureg
->input
[i
].last
,
1869 ureg
->input
[i
].semantic_name
,
1870 ureg
->input
[i
].semantic_index
,
1872 TGSI_WRITEMASK_XYZW
,
1873 ureg
->input
[i
].array_id
);
1877 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1878 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1879 emit_decl_semantic(ureg
,
1882 ureg
->input
[i
].semantic_name
,
1883 ureg
->input
[i
].semantic_index
+
1884 (j
- ureg
->input
[i
].first
),
1886 TGSI_WRITEMASK_XYZW
, 0);
1892 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
1893 emit_decl_semantic(ureg
,
1894 TGSI_FILE_SYSTEM_VALUE
,
1897 ureg
->system_value
[i
].semantic_name
,
1898 ureg
->system_value
[i
].semantic_index
,
1900 TGSI_WRITEMASK_XYZW
, 0);
1903 if (ureg
->supports_any_inout_decl_range
) {
1904 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1905 emit_decl_semantic(ureg
,
1907 ureg
->output
[i
].first
,
1908 ureg
->output
[i
].last
,
1909 ureg
->output
[i
].semantic_name
,
1910 ureg
->output
[i
].semantic_index
,
1911 ureg
->output
[i
].streams
,
1912 ureg
->output
[i
].usage_mask
,
1913 ureg
->output
[i
].array_id
);
1917 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1918 for (j
= ureg
->output
[i
].first
; j
<= ureg
->output
[i
].last
; j
++) {
1919 emit_decl_semantic(ureg
,
1922 ureg
->output
[i
].semantic_name
,
1923 ureg
->output
[i
].semantic_index
+
1924 (j
- ureg
->output
[i
].first
),
1925 ureg
->output
[i
].streams
,
1926 ureg
->output
[i
].usage_mask
, 0);
1931 for (i
= 0; i
< ureg
->nr_samplers
; i
++) {
1932 emit_decl_range( ureg
,
1934 ureg
->sampler
[i
].Index
, 1 );
1937 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
1938 emit_decl_sampler_view(ureg
,
1939 ureg
->sampler_view
[i
].index
,
1940 ureg
->sampler_view
[i
].target
,
1941 ureg
->sampler_view
[i
].return_type_x
,
1942 ureg
->sampler_view
[i
].return_type_y
,
1943 ureg
->sampler_view
[i
].return_type_z
,
1944 ureg
->sampler_view
[i
].return_type_w
);
1947 for (i
= 0; i
< ureg
->nr_images
; i
++) {
1948 emit_decl_image(ureg
,
1949 ureg
->image
[i
].index
,
1950 ureg
->image
[i
].target
,
1951 ureg
->image
[i
].format
,
1953 ureg
->image
[i
].raw
);
1956 for (i
= 0; i
< ureg
->nr_buffers
; i
++) {
1957 emit_decl_buffer(ureg
, ureg
->buffer
[i
].index
, ureg
->buffer
[i
].atomic
);
1960 for (i
= 0; i
< TGSI_MEMORY_TYPE_COUNT
; i
++) {
1961 if (ureg
->use_memory
[i
])
1962 emit_decl_memory(ureg
, i
);
1965 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
1966 struct const_decl
*decl
= &ureg
->const_decls
[i
];
1968 if (decl
->nr_constant_ranges
) {
1971 for (j
= 0; j
< decl
->nr_constant_ranges
; j
++) {
1972 emit_decl_range2D(ureg
,
1974 decl
->constant_range
[j
].first
,
1975 decl
->constant_range
[j
].last
,
1981 for (i
= 0; i
< PIPE_MAX_HW_ATOMIC_BUFFERS
; i
++) {
1982 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[i
];
1984 if (decl
->nr_hw_atomic_ranges
) {
1987 for (j
= 0; j
< decl
->nr_hw_atomic_ranges
; j
++) {
1988 emit_decl_atomic_2d(ureg
,
1989 decl
->hw_atomic_range
[j
].first
,
1990 decl
->hw_atomic_range
[j
].last
,
1992 decl
->hw_atomic_range
[j
].array_id
);
1997 if (ureg
->nr_temps
) {
1999 for (i
= 0; i
< ureg
->nr_temps
;) {
2000 boolean local
= util_bitmask_get(ureg
->local_temps
, i
);
2002 i
= util_bitmask_get_next_index(ureg
->decl_temps
, i
+ 1);
2003 if (i
== UTIL_BITMASK_INVALID_INDEX
)
2006 if (array
< ureg
->nr_array_temps
&& ureg
->array_temps
[array
] == first
)
2007 emit_decl_temps( ureg
, first
, i
- 1, local
, ++array
);
2009 emit_decl_temps( ureg
, first
, i
- 1, local
, 0 );
2013 if (ureg
->nr_addrs
) {
2014 emit_decl_range( ureg
,
2016 0, ureg
->nr_addrs
);
2019 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
2020 emit_immediate( ureg
,
2021 ureg
->immediate
[i
].value
.u
,
2022 ureg
->immediate
[i
].type
);
2026 /* Append the instruction tokens onto the declarations to build a
2027 * contiguous stream suitable to send to the driver.
2029 static void copy_instructions( struct ureg_program
*ureg
)
2031 unsigned nr_tokens
= ureg
->domain
[DOMAIN_INSN
].count
;
2032 union tgsi_any_token
*out
= get_tokens( ureg
,
2037 ureg
->domain
[DOMAIN_INSN
].tokens
,
2038 nr_tokens
* sizeof out
[0] );
2043 fixup_header_size(struct ureg_program
*ureg
)
2045 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_DECL
, 0 );
2047 out
->header
.BodySize
= ureg
->domain
[DOMAIN_DECL
].count
- 2;
2052 emit_header( struct ureg_program
*ureg
)
2054 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
2056 out
[0].header
.HeaderSize
= 2;
2057 out
[0].header
.BodySize
= 0;
2059 out
[1].processor
.Processor
= ureg
->processor
;
2060 out
[1].processor
.Padding
= 0;
2064 const struct tgsi_token
*ureg_finalize( struct ureg_program
*ureg
)
2066 const struct tgsi_token
*tokens
;
2068 switch (ureg
->processor
) {
2069 case PIPE_SHADER_VERTEX
:
2070 case PIPE_SHADER_TESS_EVAL
:
2071 ureg_property(ureg
, TGSI_PROPERTY_NEXT_SHADER
,
2072 ureg
->next_shader_processor
== -1 ?
2073 PIPE_SHADER_FRAGMENT
:
2074 ureg
->next_shader_processor
);
2080 emit_header( ureg
);
2082 copy_instructions( ureg
);
2083 fixup_header_size( ureg
);
2085 if (ureg
->domain
[0].tokens
== error_tokens
||
2086 ureg
->domain
[1].tokens
== error_tokens
) {
2087 debug_printf("%s: error in generated shader\n", __FUNCTION__
);
2092 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2095 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__
,
2096 ureg
->domain
[DOMAIN_DECL
].count
);
2097 tgsi_dump( tokens
, 0 );
2101 if (tokens
&& !tgsi_sanity_check(tokens
)) {
2102 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2103 tgsi_dump(tokens
, 0);
2113 void *ureg_create_shader( struct ureg_program
*ureg
,
2114 struct pipe_context
*pipe
,
2115 const struct pipe_stream_output_info
*so
)
2117 struct pipe_shader_state state
;
2119 pipe_shader_state_from_tgsi(&state
, ureg_finalize(ureg
));
2124 state
.stream_output
= *so
;
2126 switch (ureg
->processor
) {
2127 case PIPE_SHADER_VERTEX
:
2128 return pipe
->create_vs_state(pipe
, &state
);
2129 case PIPE_SHADER_TESS_CTRL
:
2130 return pipe
->create_tcs_state(pipe
, &state
);
2131 case PIPE_SHADER_TESS_EVAL
:
2132 return pipe
->create_tes_state(pipe
, &state
);
2133 case PIPE_SHADER_GEOMETRY
:
2134 return pipe
->create_gs_state(pipe
, &state
);
2135 case PIPE_SHADER_FRAGMENT
:
2136 return pipe
->create_fs_state(pipe
, &state
);
2143 const struct tgsi_token
*ureg_get_tokens( struct ureg_program
*ureg
,
2144 unsigned *nr_tokens
)
2146 const struct tgsi_token
*tokens
;
2148 ureg_finalize(ureg
);
2150 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2153 *nr_tokens
= ureg
->domain
[DOMAIN_DECL
].count
;
2155 ureg
->domain
[DOMAIN_DECL
].tokens
= 0;
2156 ureg
->domain
[DOMAIN_DECL
].size
= 0;
2157 ureg
->domain
[DOMAIN_DECL
].order
= 0;
2158 ureg
->domain
[DOMAIN_DECL
].count
= 0;
/* Release a token array previously handed out by ureg_get_tokens().
 * The cast drops const because ownership was transferred to the caller.
 */
void ureg_free_tokens( const struct tgsi_token *tokens )
{
   FREE((struct tgsi_token *) tokens);
}
2170 struct ureg_program
*
2171 ureg_create(enum pipe_shader_type processor
)
2173 return ureg_create_with_screen(processor
, NULL
);
2177 struct ureg_program
*
2178 ureg_create_with_screen(enum pipe_shader_type processor
,
2179 struct pipe_screen
*screen
)
2182 struct ureg_program
*ureg
= CALLOC_STRUCT( ureg_program
);
2186 ureg
->processor
= processor
;
2187 ureg
->supports_any_inout_decl_range
=
2189 screen
->get_shader_param(screen
, processor
,
2190 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE
) != 0;
2191 ureg
->next_shader_processor
= -1;
2193 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
2194 ureg
->properties
[i
] = ~0;
2196 ureg
->free_temps
= util_bitmask_create();
2197 if (ureg
->free_temps
== NULL
)
2200 ureg
->local_temps
= util_bitmask_create();
2201 if (ureg
->local_temps
== NULL
)
2202 goto no_local_temps
;
2204 ureg
->decl_temps
= util_bitmask_create();
2205 if (ureg
->decl_temps
== NULL
)
2211 util_bitmask_destroy(ureg
->local_temps
);
2213 util_bitmask_destroy(ureg
->free_temps
);
2222 ureg_set_next_shader_processor(struct ureg_program
*ureg
, unsigned processor
)
2224 ureg
->next_shader_processor
= processor
;
2229 ureg_get_nr_outputs( const struct ureg_program
*ureg
)
2233 return ureg
->nr_outputs
;
2237 void ureg_destroy( struct ureg_program
*ureg
)
2241 for (i
= 0; i
< ARRAY_SIZE(ureg
->domain
); i
++) {
2242 if (ureg
->domain
[i
].tokens
&&
2243 ureg
->domain
[i
].tokens
!= error_tokens
)
2244 FREE(ureg
->domain
[i
].tokens
);
2247 util_bitmask_destroy(ureg
->free_temps
);
2248 util_bitmask_destroy(ureg
->local_temps
);
2249 util_bitmask_destroy(ureg
->decl_temps
);