1 /**************************************************************************
3 * Copyright 2009-2010 VMware, Inc.
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26 **************************************************************************/
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
43 union tgsi_any_token
{
44 struct tgsi_header header
;
45 struct tgsi_processor processor
;
46 struct tgsi_token token
;
47 struct tgsi_property prop
;
48 struct tgsi_property_data prop_data
;
49 struct tgsi_declaration decl
;
50 struct tgsi_declaration_range decl_range
;
51 struct tgsi_declaration_dimension decl_dim
;
52 struct tgsi_declaration_interp decl_interp
;
53 struct tgsi_declaration_image decl_image
;
54 struct tgsi_declaration_semantic decl_semantic
;
55 struct tgsi_declaration_sampler_view decl_sampler_view
;
56 struct tgsi_declaration_array array
;
57 struct tgsi_immediate imm
;
58 union tgsi_immediate_data imm_data
;
59 struct tgsi_instruction insn
;
60 struct tgsi_instruction_label insn_label
;
61 struct tgsi_instruction_texture insn_texture
;
62 struct tgsi_instruction_memory insn_memory
;
63 struct tgsi_texture_offset insn_texture_offset
;
64 struct tgsi_src_register src
;
65 struct tgsi_ind_register ind
;
66 struct tgsi_dimension dim
;
67 struct tgsi_dst_register dst
;
73 union tgsi_any_token
*tokens
;
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
92 } constant_range
[UREG_MAX_CONSTANT_RANGE
];
93 unsigned nr_constant_ranges
;
96 struct hw_atomic_decl
{
101 } hw_atomic_range
[UREG_MAX_HW_ATOMIC_RANGE
];
102 unsigned nr_hw_atomic_ranges
;
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
110 enum pipe_shader_type processor
;
111 bool supports_any_inout_decl_range
;
112 int next_shader_processor
;
115 enum tgsi_semantic semantic_name
;
116 unsigned semantic_index
;
117 enum tgsi_interpolate_mode interp
;
118 unsigned char cylindrical_wrap
;
119 unsigned char usage_mask
;
120 enum tgsi_interpolate_loc interp_location
;
124 } input
[UREG_MAX_INPUT
];
125 unsigned nr_inputs
, nr_input_regs
;
127 unsigned vs_inputs
[PIPE_MAX_ATTRIBS
/32];
130 enum tgsi_semantic semantic_name
;
131 unsigned semantic_index
;
132 } system_value
[UREG_MAX_SYSTEM_VALUE
];
133 unsigned nr_system_values
;
136 enum tgsi_semantic semantic_name
;
137 unsigned semantic_index
;
139 unsigned usage_mask
; /* = TGSI_WRITEMASK_* */
143 } output
[UREG_MAX_OUTPUT
];
144 unsigned nr_outputs
, nr_output_regs
;
154 } immediate
[UREG_MAX_IMMEDIATE
];
155 unsigned nr_immediates
;
157 struct ureg_src sampler
[PIPE_MAX_SAMPLERS
];
158 unsigned nr_samplers
;
162 enum tgsi_texture_type target
;
163 enum tgsi_return_type return_type_x
;
164 enum tgsi_return_type return_type_y
;
165 enum tgsi_return_type return_type_z
;
166 enum tgsi_return_type return_type_w
;
167 } sampler_view
[PIPE_MAX_SHADER_SAMPLER_VIEWS
];
168 unsigned nr_sampler_views
;
172 enum tgsi_texture_type target
;
173 enum pipe_format format
;
176 } image
[PIPE_MAX_SHADER_IMAGES
];
182 } buffer
[PIPE_MAX_SHADER_BUFFERS
];
185 struct util_bitmask
*free_temps
;
186 struct util_bitmask
*local_temps
;
187 struct util_bitmask
*decl_temps
;
190 unsigned array_temps
[UREG_MAX_ARRAY_TEMPS
];
191 unsigned nr_array_temps
;
193 struct const_decl const_decls
[PIPE_MAX_CONSTANT_BUFFERS
];
195 struct hw_atomic_decl hw_atomic_decls
[PIPE_MAX_HW_ATOMIC_BUFFERS
];
197 unsigned properties
[TGSI_PROPERTY_COUNT
];
200 unsigned nr_instructions
;
202 struct ureg_tokens domain
[2];
204 bool use_memory
[TGSI_MEMORY_TYPE_COUNT
];
207 static union tgsi_any_token error_tokens
[32];
209 static void tokens_error( struct ureg_tokens
*tokens
)
211 if (tokens
->tokens
&& tokens
->tokens
!= error_tokens
)
212 FREE(tokens
->tokens
);
214 tokens
->tokens
= error_tokens
;
215 tokens
->size
= ARRAY_SIZE(error_tokens
);
220 static void tokens_expand( struct ureg_tokens
*tokens
,
223 unsigned old_size
= tokens
->size
* sizeof(unsigned);
225 if (tokens
->tokens
== error_tokens
) {
229 while (tokens
->count
+ count
> tokens
->size
) {
230 tokens
->size
= (1 << ++tokens
->order
);
233 tokens
->tokens
= REALLOC(tokens
->tokens
,
235 tokens
->size
* sizeof(unsigned));
236 if (tokens
->tokens
== NULL
) {
237 tokens_error(tokens
);
241 static void set_bad( struct ureg_program
*ureg
)
243 tokens_error(&ureg
->domain
[0]);
248 static union tgsi_any_token
*get_tokens( struct ureg_program
*ureg
,
252 struct ureg_tokens
*tokens
= &ureg
->domain
[domain
];
253 union tgsi_any_token
*result
;
255 if (tokens
->count
+ count
> tokens
->size
)
256 tokens_expand(tokens
, count
);
258 result
= &tokens
->tokens
[tokens
->count
];
259 tokens
->count
+= count
;
264 static union tgsi_any_token
*retrieve_token( struct ureg_program
*ureg
,
268 if (ureg
->domain
[domain
].tokens
== error_tokens
)
269 return &error_tokens
[0];
271 return &ureg
->domain
[domain
].tokens
[nr
];
276 ureg_property(struct ureg_program
*ureg
, unsigned name
, unsigned value
)
278 assert(name
< ARRAY_SIZE(ureg
->properties
));
279 ureg
->properties
[name
] = value
;
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program
*ureg
,
284 enum tgsi_semantic semantic_name
,
285 unsigned semantic_index
,
286 enum tgsi_interpolate_mode interp_mode
,
287 unsigned cylindrical_wrap
,
288 enum tgsi_interpolate_loc interp_location
,
296 assert(usage_mask
!= 0);
297 assert(usage_mask
<= TGSI_WRITEMASK_XYZW
);
299 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
300 if (ureg
->input
[i
].semantic_name
== semantic_name
&&
301 ureg
->input
[i
].semantic_index
== semantic_index
) {
302 assert(ureg
->input
[i
].interp
== interp_mode
);
303 assert(ureg
->input
[i
].cylindrical_wrap
== cylindrical_wrap
);
304 assert(ureg
->input
[i
].interp_location
== interp_location
);
305 if (ureg
->input
[i
].array_id
== array_id
) {
306 ureg
->input
[i
].usage_mask
|= usage_mask
;
309 assert((ureg
->input
[i
].usage_mask
& usage_mask
) == 0);
313 if (ureg
->nr_inputs
< UREG_MAX_INPUT
) {
314 assert(array_size
>= 1);
315 ureg
->input
[i
].semantic_name
= semantic_name
;
316 ureg
->input
[i
].semantic_index
= semantic_index
;
317 ureg
->input
[i
].interp
= interp_mode
;
318 ureg
->input
[i
].cylindrical_wrap
= cylindrical_wrap
;
319 ureg
->input
[i
].interp_location
= interp_location
;
320 ureg
->input
[i
].first
= index
;
321 ureg
->input
[i
].last
= index
+ array_size
- 1;
322 ureg
->input
[i
].array_id
= array_id
;
323 ureg
->input
[i
].usage_mask
= usage_mask
;
324 ureg
->nr_input_regs
= MAX2(ureg
->nr_input_regs
, index
+ array_size
);
331 return ureg_src_array_register(TGSI_FILE_INPUT
, ureg
->input
[i
].first
,
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program
*ureg
,
337 enum tgsi_semantic semantic_name
,
338 unsigned semantic_index
,
339 enum tgsi_interpolate_mode interp_mode
,
340 unsigned cylindrical_wrap
,
341 enum tgsi_interpolate_loc interp_location
,
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
346 semantic_name
, semantic_index
, interp_mode
,
347 cylindrical_wrap
, interp_location
,
348 ureg
->nr_input_regs
, TGSI_WRITEMASK_XYZW
, array_id
, array_size
);
353 ureg_DECL_vs_input( struct ureg_program
*ureg
,
356 assert(ureg
->processor
== PIPE_SHADER_VERTEX
);
357 assert(index
/ 32 < ARRAY_SIZE(ureg
->vs_inputs
));
359 ureg
->vs_inputs
[index
/32] |= 1 << (index
% 32);
360 return ureg_src_register( TGSI_FILE_INPUT
, index
);
365 ureg_DECL_input_layout(struct ureg_program
*ureg
,
366 enum tgsi_semantic semantic_name
,
367 unsigned semantic_index
,
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg
,
374 semantic_name
, semantic_index
,
375 TGSI_INTERPOLATE_CONSTANT
, 0, TGSI_INTERPOLATE_LOC_CENTER
,
376 index
, usage_mask
, array_id
, array_size
);
381 ureg_DECL_input(struct ureg_program
*ureg
,
382 enum tgsi_semantic semantic_name
,
383 unsigned semantic_index
,
387 return ureg_DECL_fs_input_cyl_centroid(ureg
, semantic_name
, semantic_index
,
388 TGSI_INTERPOLATE_CONSTANT
, 0,
389 TGSI_INTERPOLATE_LOC_CENTER
,
390 array_id
, array_size
);
395 ureg_DECL_system_value(struct ureg_program
*ureg
,
396 enum tgsi_semantic semantic_name
,
397 unsigned semantic_index
)
401 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
402 if (ureg
->system_value
[i
].semantic_name
== semantic_name
&&
403 ureg
->system_value
[i
].semantic_index
== semantic_index
) {
408 if (ureg
->nr_system_values
< UREG_MAX_SYSTEM_VALUE
) {
409 ureg
->system_value
[ureg
->nr_system_values
].semantic_name
= semantic_name
;
410 ureg
->system_value
[ureg
->nr_system_values
].semantic_index
= semantic_index
;
411 i
= ureg
->nr_system_values
;
412 ureg
->nr_system_values
++;
418 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE
, i
);
423 ureg_DECL_output_layout(struct ureg_program
*ureg
,
424 enum tgsi_semantic semantic_name
,
425 unsigned semantic_index
,
434 assert(usage_mask
!= 0);
435 assert(!(streams
& 0x03) || (usage_mask
& 1));
436 assert(!(streams
& 0x0c) || (usage_mask
& 2));
437 assert(!(streams
& 0x30) || (usage_mask
& 4));
438 assert(!(streams
& 0xc0) || (usage_mask
& 8));
440 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
441 if (ureg
->output
[i
].semantic_name
== semantic_name
&&
442 ureg
->output
[i
].semantic_index
== semantic_index
) {
443 if (ureg
->output
[i
].array_id
== array_id
) {
444 ureg
->output
[i
].usage_mask
|= usage_mask
;
447 assert((ureg
->output
[i
].usage_mask
& usage_mask
) == 0);
451 if (ureg
->nr_outputs
< UREG_MAX_OUTPUT
) {
452 ureg
->output
[i
].semantic_name
= semantic_name
;
453 ureg
->output
[i
].semantic_index
= semantic_index
;
454 ureg
->output
[i
].usage_mask
= usage_mask
;
455 ureg
->output
[i
].first
= index
;
456 ureg
->output
[i
].last
= index
+ array_size
- 1;
457 ureg
->output
[i
].array_id
= array_id
;
458 ureg
->nr_output_regs
= MAX2(ureg
->nr_output_regs
, index
+ array_size
);
467 ureg
->output
[i
].streams
|= streams
;
469 return ureg_dst_array_register(TGSI_FILE_OUTPUT
, ureg
->output
[i
].first
,
475 ureg_DECL_output_masked(struct ureg_program
*ureg
,
482 return ureg_DECL_output_layout(ureg
, name
, index
, 0,
483 ureg
->nr_output_regs
, usage_mask
, array_id
, array_size
);
488 ureg_DECL_output(struct ureg_program
*ureg
,
489 enum tgsi_semantic name
,
492 return ureg_DECL_output_masked(ureg
, name
, index
, TGSI_WRITEMASK_XYZW
,
497 ureg_DECL_output_array(struct ureg_program
*ureg
,
498 enum tgsi_semantic semantic_name
,
499 unsigned semantic_index
,
503 return ureg_DECL_output_masked(ureg
, semantic_name
, semantic_index
,
505 array_id
, array_size
);
509 /* Returns a new constant register. Keep track of which have been
510 * referred to so that we can emit decls later.
512 * Constant operands declared with this function must be addressed
513 * with a two-dimensional index.
515 * There is nothing in this code to bind this constant to any tracked
516 * value or manage any constant_buffer contents -- that's the
517 * resposibility of the calling code.
520 ureg_DECL_constant2D(struct ureg_program
*ureg
,
525 struct const_decl
*decl
= &ureg
->const_decls
[index2D
];
527 assert(index2D
< PIPE_MAX_CONSTANT_BUFFERS
);
529 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
530 uint i
= decl
->nr_constant_ranges
++;
532 decl
->constant_range
[i
].first
= first
;
533 decl
->constant_range
[i
].last
= last
;
538 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
540 * Constant operands declared with this function must be addressed
541 * with a one-dimensional index.
544 ureg_DECL_constant(struct ureg_program
*ureg
,
547 struct const_decl
*decl
= &ureg
->const_decls
[0];
548 unsigned minconst
= index
, maxconst
= index
;
551 /* Inside existing range?
553 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
554 if (decl
->constant_range
[i
].first
<= index
&&
555 decl
->constant_range
[i
].last
>= index
) {
560 /* Extend existing range?
562 for (i
= 0; i
< decl
->nr_constant_ranges
; i
++) {
563 if (decl
->constant_range
[i
].last
== index
- 1) {
564 decl
->constant_range
[i
].last
= index
;
568 if (decl
->constant_range
[i
].first
== index
+ 1) {
569 decl
->constant_range
[i
].first
= index
;
573 minconst
= MIN2(minconst
, decl
->constant_range
[i
].first
);
574 maxconst
= MAX2(maxconst
, decl
->constant_range
[i
].last
);
579 if (decl
->nr_constant_ranges
< UREG_MAX_CONSTANT_RANGE
) {
580 i
= decl
->nr_constant_ranges
++;
581 decl
->constant_range
[i
].first
= index
;
582 decl
->constant_range
[i
].last
= index
;
586 /* Collapse all ranges down to one:
589 decl
->constant_range
[0].first
= minconst
;
590 decl
->constant_range
[0].last
= maxconst
;
591 decl
->nr_constant_ranges
= 1;
594 assert(i
< decl
->nr_constant_ranges
);
595 assert(decl
->constant_range
[i
].first
<= index
);
596 assert(decl
->constant_range
[i
].last
>= index
);
598 struct ureg_src src
= ureg_src_register(TGSI_FILE_CONSTANT
, index
);
599 return ureg_src_dimension(src
, 0);
603 /* Returns a new hw atomic register. Keep track of which have been
604 * referred to so that we can emit decls later.
607 ureg_DECL_hw_atomic(struct ureg_program
*ureg
,
613 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[buffer_id
];
615 if (decl
->nr_hw_atomic_ranges
< UREG_MAX_HW_ATOMIC_RANGE
) {
616 uint i
= decl
->nr_hw_atomic_ranges
++;
618 decl
->hw_atomic_range
[i
].first
= first
;
619 decl
->hw_atomic_range
[i
].last
= last
;
620 decl
->hw_atomic_range
[i
].array_id
= array_id
;
626 static struct ureg_dst
alloc_temporary( struct ureg_program
*ureg
,
631 /* Look for a released temporary.
633 for (i
= util_bitmask_get_first_index(ureg
->free_temps
);
634 i
!= UTIL_BITMASK_INVALID_INDEX
;
635 i
= util_bitmask_get_next_index(ureg
->free_temps
, i
+ 1)) {
636 if (util_bitmask_get(ureg
->local_temps
, i
) == local
)
640 /* Or allocate a new one.
642 if (i
== UTIL_BITMASK_INVALID_INDEX
) {
643 i
= ureg
->nr_temps
++;
646 util_bitmask_set(ureg
->local_temps
, i
);
648 /* Start a new declaration when the local flag changes */
649 if (!i
|| util_bitmask_get(ureg
->local_temps
, i
- 1) != local
)
650 util_bitmask_set(ureg
->decl_temps
, i
);
653 util_bitmask_clear(ureg
->free_temps
, i
);
655 return ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
658 struct ureg_dst
ureg_DECL_temporary( struct ureg_program
*ureg
)
660 return alloc_temporary(ureg
, FALSE
);
663 struct ureg_dst
ureg_DECL_local_temporary( struct ureg_program
*ureg
)
665 return alloc_temporary(ureg
, TRUE
);
668 struct ureg_dst
ureg_DECL_array_temporary( struct ureg_program
*ureg
,
672 unsigned i
= ureg
->nr_temps
;
673 struct ureg_dst dst
= ureg_dst_register( TGSI_FILE_TEMPORARY
, i
);
676 util_bitmask_set(ureg
->local_temps
, i
);
678 /* Always start a new declaration at the start */
679 util_bitmask_set(ureg
->decl_temps
, i
);
681 ureg
->nr_temps
+= size
;
683 /* and also at the end of the array */
684 util_bitmask_set(ureg
->decl_temps
, ureg
->nr_temps
);
686 if (ureg
->nr_array_temps
< UREG_MAX_ARRAY_TEMPS
) {
687 ureg
->array_temps
[ureg
->nr_array_temps
++] = i
;
688 dst
.ArrayID
= ureg
->nr_array_temps
;
694 void ureg_release_temporary( struct ureg_program
*ureg
,
695 struct ureg_dst tmp
)
697 if(tmp
.File
== TGSI_FILE_TEMPORARY
)
698 util_bitmask_set(ureg
->free_temps
, tmp
.Index
);
702 /* Allocate a new address register.
704 struct ureg_dst
ureg_DECL_address( struct ureg_program
*ureg
)
706 if (ureg
->nr_addrs
< UREG_MAX_ADDR
)
707 return ureg_dst_register( TGSI_FILE_ADDRESS
, ureg
->nr_addrs
++ );
710 return ureg_dst_register( TGSI_FILE_ADDRESS
, 0 );
713 /* Allocate a new sampler.
715 struct ureg_src
ureg_DECL_sampler( struct ureg_program
*ureg
,
720 for (i
= 0; i
< ureg
->nr_samplers
; i
++)
721 if (ureg
->sampler
[i
].Index
== (int)nr
)
722 return ureg
->sampler
[i
];
724 if (i
< PIPE_MAX_SAMPLERS
) {
725 ureg
->sampler
[i
] = ureg_src_register( TGSI_FILE_SAMPLER
, nr
);
727 return ureg
->sampler
[i
];
731 return ureg
->sampler
[0];
735 * Allocate a new shader sampler view.
738 ureg_DECL_sampler_view(struct ureg_program
*ureg
,
740 enum tgsi_texture_type target
,
741 enum tgsi_return_type return_type_x
,
742 enum tgsi_return_type return_type_y
,
743 enum tgsi_return_type return_type_z
,
744 enum tgsi_return_type return_type_w
)
746 struct ureg_src reg
= ureg_src_register(TGSI_FILE_SAMPLER_VIEW
, index
);
749 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
750 if (ureg
->sampler_view
[i
].index
== index
) {
755 if (i
< PIPE_MAX_SHADER_SAMPLER_VIEWS
) {
756 ureg
->sampler_view
[i
].index
= index
;
757 ureg
->sampler_view
[i
].target
= target
;
758 ureg
->sampler_view
[i
].return_type_x
= return_type_x
;
759 ureg
->sampler_view
[i
].return_type_y
= return_type_y
;
760 ureg
->sampler_view
[i
].return_type_z
= return_type_z
;
761 ureg
->sampler_view
[i
].return_type_w
= return_type_w
;
762 ureg
->nr_sampler_views
++;
770 /* Allocate a new image.
773 ureg_DECL_image(struct ureg_program
*ureg
,
775 enum tgsi_texture_type target
,
776 enum pipe_format format
,
780 struct ureg_src reg
= ureg_src_register(TGSI_FILE_IMAGE
, index
);
783 for (i
= 0; i
< ureg
->nr_images
; i
++)
784 if (ureg
->image
[i
].index
== index
)
787 if (i
< PIPE_MAX_SHADER_IMAGES
) {
788 ureg
->image
[i
].index
= index
;
789 ureg
->image
[i
].target
= target
;
790 ureg
->image
[i
].wr
= wr
;
791 ureg
->image
[i
].raw
= raw
;
792 ureg
->image
[i
].format
= format
;
801 /* Allocate a new buffer.
803 struct ureg_src
ureg_DECL_buffer(struct ureg_program
*ureg
, unsigned nr
,
806 struct ureg_src reg
= ureg_src_register(TGSI_FILE_BUFFER
, nr
);
809 for (i
= 0; i
< ureg
->nr_buffers
; i
++)
810 if (ureg
->buffer
[i
].index
== nr
)
813 if (i
< PIPE_MAX_SHADER_BUFFERS
) {
814 ureg
->buffer
[i
].index
= nr
;
815 ureg
->buffer
[i
].atomic
= atomic
;
824 /* Allocate a memory area.
826 struct ureg_src
ureg_DECL_memory(struct ureg_program
*ureg
,
827 unsigned memory_type
)
829 struct ureg_src reg
= ureg_src_register(TGSI_FILE_MEMORY
, memory_type
);
831 ureg
->use_memory
[memory_type
] = true;
836 match_or_expand_immediate64( const unsigned *v
,
842 unsigned nr2
= *pnr2
;
846 for (i
= 0; i
< nr
; i
+= 2) {
847 boolean found
= FALSE
;
849 for (j
= 0; j
< nr2
&& !found
; j
+= 2) {
850 if (v
[i
] == v2
[j
] && v
[i
+ 1] == v2
[j
+ 1]) {
851 *swizzle
|= (j
<< (i
* 2)) | ((j
+ 1) << ((i
+ 1) * 2));
861 v2
[nr2
+ 1] = v
[i
+ 1];
863 *swizzle
|= (nr2
<< (i
* 2)) | ((nr2
+ 1) << ((i
+ 1) * 2));
868 /* Actually expand immediate only when fully succeeded.
875 match_or_expand_immediate( const unsigned *v
,
882 unsigned nr2
= *pnr2
;
885 if (type
== TGSI_IMM_FLOAT64
||
886 type
== TGSI_IMM_UINT64
||
887 type
== TGSI_IMM_INT64
)
888 return match_or_expand_immediate64(v
, nr
, v2
, pnr2
, swizzle
);
892 for (i
= 0; i
< nr
; i
++) {
893 boolean found
= FALSE
;
895 for (j
= 0; j
< nr2
&& !found
; j
++) {
897 *swizzle
|= j
<< (i
* 2);
908 *swizzle
|= nr2
<< (i
* 2);
913 /* Actually expand immediate only when fully succeeded.
920 static struct ureg_src
921 decl_immediate( struct ureg_program
*ureg
,
927 unsigned swizzle
= 0;
929 /* Could do a first pass where we examine all existing immediates
933 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
934 if (ureg
->immediate
[i
].type
!= type
) {
937 if (match_or_expand_immediate(v
,
940 ureg
->immediate
[i
].value
.u
,
941 &ureg
->immediate
[i
].nr
,
947 if (ureg
->nr_immediates
< UREG_MAX_IMMEDIATE
) {
948 i
= ureg
->nr_immediates
++;
949 ureg
->immediate
[i
].type
= type
;
950 if (match_or_expand_immediate(v
,
953 ureg
->immediate
[i
].value
.u
,
954 &ureg
->immediate
[i
].nr
,
963 /* Make sure that all referenced elements are from this immediate.
964 * Has the effect of making size-one immediates into scalars.
966 if (type
== TGSI_IMM_FLOAT64
||
967 type
== TGSI_IMM_UINT64
||
968 type
== TGSI_IMM_INT64
) {
969 for (j
= nr
; j
< 4; j
+=2) {
970 swizzle
|= (swizzle
& 0xf) << (j
* 2);
973 for (j
= nr
; j
< 4; j
++) {
974 swizzle
|= (swizzle
& 0x3) << (j
* 2);
977 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE
, i
),
978 (swizzle
>> 0) & 0x3,
979 (swizzle
>> 2) & 0x3,
980 (swizzle
>> 4) & 0x3,
981 (swizzle
>> 6) & 0x3);
986 ureg_DECL_immediate( struct ureg_program
*ureg
,
996 for (i
= 0; i
< nr
; i
++) {
1000 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT32
);
1004 ureg_DECL_immediate_f64( struct ureg_program
*ureg
,
1014 assert((nr
/ 2) < 3);
1015 for (i
= 0; i
< nr
/ 2; i
++) {
1019 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_FLOAT64
);
1023 ureg_DECL_immediate_uint( struct ureg_program
*ureg
,
1027 return decl_immediate(ureg
, v
, nr
, TGSI_IMM_UINT32
);
1032 ureg_DECL_immediate_block_uint( struct ureg_program
*ureg
,
1039 if (ureg
->nr_immediates
+ (nr
+ 3) / 4 > UREG_MAX_IMMEDIATE
) {
1041 return ureg_src_register(TGSI_FILE_IMMEDIATE
, 0);
1044 index
= ureg
->nr_immediates
;
1045 ureg
->nr_immediates
+= (nr
+ 3) / 4;
1047 for (i
= index
; i
< ureg
->nr_immediates
; i
++) {
1048 ureg
->immediate
[i
].type
= TGSI_IMM_UINT32
;
1049 ureg
->immediate
[i
].nr
= nr
> 4 ? 4 : nr
;
1050 memcpy(ureg
->immediate
[i
].value
.u
,
1051 &v
[(i
- index
) * 4],
1052 ureg
->immediate
[i
].nr
* sizeof(uint
));
1056 return ureg_src_register(TGSI_FILE_IMMEDIATE
, index
);
1061 ureg_DECL_immediate_int( struct ureg_program
*ureg
,
1065 return decl_immediate(ureg
, (const unsigned *)v
, nr
, TGSI_IMM_INT32
);
1069 ureg_DECL_immediate_uint64( struct ureg_program
*ureg
,
1079 assert((nr
/ 2) < 3);
1080 for (i
= 0; i
< nr
/ 2; i
++) {
1084 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_UINT64
);
1088 ureg_DECL_immediate_int64( struct ureg_program
*ureg
,
1098 assert((nr
/ 2) < 3);
1099 for (i
= 0; i
< nr
/ 2; i
++) {
1103 return decl_immediate(ureg
, fu
.u
, nr
, TGSI_IMM_INT64
);
1107 ureg_emit_src( struct ureg_program
*ureg
,
1108 struct ureg_src src
)
1110 unsigned size
= 1 + (src
.Indirect
? 1 : 0) +
1111 (src
.Dimension
? (src
.DimIndirect
? 2 : 1) : 0);
1113 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1116 assert(src
.File
!= TGSI_FILE_NULL
);
1117 assert(src
.File
< TGSI_FILE_COUNT
);
1120 out
[n
].src
.File
= src
.File
;
1121 out
[n
].src
.SwizzleX
= src
.SwizzleX
;
1122 out
[n
].src
.SwizzleY
= src
.SwizzleY
;
1123 out
[n
].src
.SwizzleZ
= src
.SwizzleZ
;
1124 out
[n
].src
.SwizzleW
= src
.SwizzleW
;
1125 out
[n
].src
.Index
= src
.Index
;
1126 out
[n
].src
.Negate
= src
.Negate
;
1127 out
[0].src
.Absolute
= src
.Absolute
;
1131 out
[0].src
.Indirect
= 1;
1133 out
[n
].ind
.File
= src
.IndirectFile
;
1134 out
[n
].ind
.Swizzle
= src
.IndirectSwizzle
;
1135 out
[n
].ind
.Index
= src
.IndirectIndex
;
1136 if (!ureg
->supports_any_inout_decl_range
&&
1137 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1138 out
[n
].ind
.ArrayID
= 0;
1140 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1144 if (src
.Dimension
) {
1145 out
[0].src
.Dimension
= 1;
1146 out
[n
].dim
.Dimension
= 0;
1147 out
[n
].dim
.Padding
= 0;
1148 if (src
.DimIndirect
) {
1149 out
[n
].dim
.Indirect
= 1;
1150 out
[n
].dim
.Index
= src
.DimensionIndex
;
1153 out
[n
].ind
.File
= src
.DimIndFile
;
1154 out
[n
].ind
.Swizzle
= src
.DimIndSwizzle
;
1155 out
[n
].ind
.Index
= src
.DimIndIndex
;
1156 if (!ureg
->supports_any_inout_decl_range
&&
1157 (src
.File
== TGSI_FILE_INPUT
|| src
.File
== TGSI_FILE_OUTPUT
))
1158 out
[n
].ind
.ArrayID
= 0;
1160 out
[n
].ind
.ArrayID
= src
.ArrayID
;
1162 out
[n
].dim
.Indirect
= 0;
1163 out
[n
].dim
.Index
= src
.DimensionIndex
;
1173 ureg_emit_dst( struct ureg_program
*ureg
,
1174 struct ureg_dst dst
)
1176 unsigned size
= 1 + (dst
.Indirect
? 1 : 0) +
1177 (dst
.Dimension
? (dst
.DimIndirect
? 2 : 1) : 0);
1179 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_INSN
, size
);
1182 assert(dst
.File
!= TGSI_FILE_NULL
);
1183 assert(dst
.File
!= TGSI_FILE_SAMPLER
);
1184 assert(dst
.File
!= TGSI_FILE_SAMPLER_VIEW
);
1185 assert(dst
.File
!= TGSI_FILE_IMMEDIATE
);
1186 assert(dst
.File
< TGSI_FILE_COUNT
);
1189 out
[n
].dst
.File
= dst
.File
;
1190 out
[n
].dst
.WriteMask
= dst
.WriteMask
;
1191 out
[n
].dst
.Indirect
= dst
.Indirect
;
1192 out
[n
].dst
.Index
= dst
.Index
;
1197 out
[n
].ind
.File
= dst
.IndirectFile
;
1198 out
[n
].ind
.Swizzle
= dst
.IndirectSwizzle
;
1199 out
[n
].ind
.Index
= dst
.IndirectIndex
;
1200 if (!ureg
->supports_any_inout_decl_range
&&
1201 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1202 out
[n
].ind
.ArrayID
= 0;
1204 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1208 if (dst
.Dimension
) {
1209 out
[0].dst
.Dimension
= 1;
1210 out
[n
].dim
.Dimension
= 0;
1211 out
[n
].dim
.Padding
= 0;
1212 if (dst
.DimIndirect
) {
1213 out
[n
].dim
.Indirect
= 1;
1214 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1217 out
[n
].ind
.File
= dst
.DimIndFile
;
1218 out
[n
].ind
.Swizzle
= dst
.DimIndSwizzle
;
1219 out
[n
].ind
.Index
= dst
.DimIndIndex
;
1220 if (!ureg
->supports_any_inout_decl_range
&&
1221 (dst
.File
== TGSI_FILE_INPUT
|| dst
.File
== TGSI_FILE_OUTPUT
))
1222 out
[n
].ind
.ArrayID
= 0;
1224 out
[n
].ind
.ArrayID
= dst
.ArrayID
;
1226 out
[n
].dim
.Indirect
= 0;
1227 out
[n
].dim
.Index
= dst
.DimensionIndex
;
1236 static void validate( enum tgsi_opcode opcode
,
1241 const struct tgsi_opcode_info
*info
= tgsi_get_opcode_info( opcode
);
1244 assert(nr_dst
== info
->num_dst
);
1245 assert(nr_src
== info
->num_src
);
1250 struct ureg_emit_insn_result
1251 ureg_emit_insn(struct ureg_program
*ureg
,
1252 enum tgsi_opcode opcode
,
1258 union tgsi_any_token
*out
;
1260 struct ureg_emit_insn_result result
;
1262 validate( opcode
, num_dst
, num_src
);
1264 out
= get_tokens( ureg
, DOMAIN_INSN
, count
);
1265 out
[0].insn
= tgsi_default_instruction();
1266 out
[0].insn
.Opcode
= opcode
;
1267 out
[0].insn
.Saturate
= saturate
;
1268 out
[0].insn
.Precise
= precise
;
1269 out
[0].insn
.NumDstRegs
= num_dst
;
1270 out
[0].insn
.NumSrcRegs
= num_src
;
1272 result
.insn_token
= ureg
->domain
[DOMAIN_INSN
].count
- count
;
1273 result
.extended_token
= result
.insn_token
;
1275 ureg
->nr_instructions
++;
1282 * Emit a label token.
1283 * \param label_token returns a token number indicating where the label
1284 * needs to be patched later. Later, this value should be passed to the
1285 * ureg_fixup_label() function.
1288 ureg_emit_label(struct ureg_program
*ureg
,
1289 unsigned extended_token
,
1290 unsigned *label_token
)
1292 union tgsi_any_token
*out
, *insn
;
1297 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1300 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1301 insn
->insn
.Label
= 1;
1303 *label_token
= ureg
->domain
[DOMAIN_INSN
].count
- 1;
1306 /* Will return a number which can be used in a label to point to the
1307 * next instruction to be emitted.
1310 ureg_get_instruction_number( struct ureg_program
*ureg
)
1312 return ureg
->nr_instructions
;
1315 /* Patch a given label (expressed as a token number) to point to a
1316 * given instruction (expressed as an instruction number).
1319 ureg_fixup_label(struct ureg_program
*ureg
,
1320 unsigned label_token
,
1321 unsigned instruction_number
)
1323 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, label_token
);
1325 out
->insn_label
.Label
= instruction_number
;
1330 ureg_emit_texture(struct ureg_program
*ureg
,
1331 unsigned extended_token
,
1332 enum tgsi_texture_type target
,
1333 enum tgsi_return_type return_type
, unsigned num_offsets
)
1335 union tgsi_any_token
*out
, *insn
;
1337 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1338 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1340 insn
->insn
.Texture
= 1;
1343 out
[0].insn_texture
.Texture
= target
;
1344 out
[0].insn_texture
.NumOffsets
= num_offsets
;
1345 out
[0].insn_texture
.ReturnType
= return_type
;
1349 ureg_emit_texture_offset(struct ureg_program
*ureg
,
1350 const struct tgsi_texture_offset
*offset
)
1352 union tgsi_any_token
*out
;
1354 out
= get_tokens( ureg
, DOMAIN_INSN
, 1);
1357 out
[0].insn_texture_offset
= *offset
;
1362 ureg_emit_memory(struct ureg_program
*ureg
,
1363 unsigned extended_token
,
1365 enum tgsi_texture_type texture
,
1366 enum pipe_format format
)
1368 union tgsi_any_token
*out
, *insn
;
1370 out
= get_tokens( ureg
, DOMAIN_INSN
, 1 );
1371 insn
= retrieve_token( ureg
, DOMAIN_INSN
, extended_token
);
1373 insn
->insn
.Memory
= 1;
1376 out
[0].insn_memory
.Qualifier
= qualifier
;
1377 out
[0].insn_memory
.Texture
= texture
;
1378 out
[0].insn_memory
.Format
= format
;
1382 ureg_fixup_insn_size(struct ureg_program
*ureg
,
1385 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_INSN
, insn
);
1387 assert(out
->insn
.Type
== TGSI_TOKEN_TYPE_INSTRUCTION
);
1388 out
->insn
.NrTokens
= ureg
->domain
[DOMAIN_INSN
].count
- insn
- 1;
1393 ureg_insn(struct ureg_program
*ureg
,
1394 enum tgsi_opcode opcode
,
1395 const struct ureg_dst
*dst
,
1397 const struct ureg_src
*src
,
1401 struct ureg_emit_insn_result insn
;
1405 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1409 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1411 insn
= ureg_emit_insn(ureg
,
1418 for (i
= 0; i
< nr_dst
; i
++)
1419 ureg_emit_dst( ureg
, dst
[i
] );
1421 for (i
= 0; i
< nr_src
; i
++)
1422 ureg_emit_src( ureg
, src
[i
] );
1424 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1428 ureg_tex_insn(struct ureg_program
*ureg
,
1429 enum tgsi_opcode opcode
,
1430 const struct ureg_dst
*dst
,
1432 enum tgsi_texture_type target
,
1433 enum tgsi_return_type return_type
,
1434 const struct tgsi_texture_offset
*texoffsets
,
1436 const struct ureg_src
*src
,
1439 struct ureg_emit_insn_result insn
;
1443 if (nr_dst
&& ureg_dst_is_empty(dst
[0])) {
1447 saturate
= nr_dst
? dst
[0].Saturate
: FALSE
;
1449 insn
= ureg_emit_insn(ureg
,
1456 ureg_emit_texture( ureg
, insn
.extended_token
, target
, return_type
,
1459 for (i
= 0; i
< nr_offset
; i
++)
1460 ureg_emit_texture_offset( ureg
, &texoffsets
[i
]);
1462 for (i
= 0; i
< nr_dst
; i
++)
1463 ureg_emit_dst( ureg
, dst
[i
] );
1465 for (i
= 0; i
< nr_src
; i
++)
1466 ureg_emit_src( ureg
, src
[i
] );
1468 ureg_fixup_insn_size( ureg
, insn
.insn_token
);
1473 ureg_memory_insn(struct ureg_program
*ureg
,
1474 enum tgsi_opcode opcode
,
1475 const struct ureg_dst
*dst
,
1477 const struct ureg_src
*src
,
1480 enum tgsi_texture_type texture
,
1481 enum pipe_format format
)
1483 struct ureg_emit_insn_result insn
;
1486 insn
= ureg_emit_insn(ureg
,
1493 ureg_emit_memory(ureg
, insn
.extended_token
, qualifier
, texture
, format
);
1495 for (i
= 0; i
< nr_dst
; i
++)
1496 ureg_emit_dst(ureg
, dst
[i
]);
1498 for (i
= 0; i
< nr_src
; i
++)
1499 ureg_emit_src(ureg
, src
[i
]);
1501 ureg_fixup_insn_size(ureg
, insn
.insn_token
);
1506 emit_decl_semantic(struct ureg_program
*ureg
,
1510 enum tgsi_semantic semantic_name
,
1511 unsigned semantic_index
,
1513 unsigned usage_mask
,
1516 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1519 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1520 out
[0].decl
.NrTokens
= 3;
1521 out
[0].decl
.File
= file
;
1522 out
[0].decl
.UsageMask
= usage_mask
;
1523 out
[0].decl
.Semantic
= 1;
1524 out
[0].decl
.Array
= array_id
!= 0;
1527 out
[1].decl_range
.First
= first
;
1528 out
[1].decl_range
.Last
= last
;
1531 out
[2].decl_semantic
.Name
= semantic_name
;
1532 out
[2].decl_semantic
.Index
= semantic_index
;
1533 out
[2].decl_semantic
.StreamX
= streams
& 3;
1534 out
[2].decl_semantic
.StreamY
= (streams
>> 2) & 3;
1535 out
[2].decl_semantic
.StreamZ
= (streams
>> 4) & 3;
1536 out
[2].decl_semantic
.StreamW
= (streams
>> 6) & 3;
1540 out
[3].array
.ArrayID
= array_id
;
1545 emit_decl_atomic_2d(struct ureg_program
*ureg
,
1551 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, array_id
? 4 : 3);
1554 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1555 out
[0].decl
.NrTokens
= 3;
1556 out
[0].decl
.File
= TGSI_FILE_HW_ATOMIC
;
1557 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1558 out
[0].decl
.Dimension
= 1;
1559 out
[0].decl
.Array
= array_id
!= 0;
1562 out
[1].decl_range
.First
= first
;
1563 out
[1].decl_range
.Last
= last
;
1566 out
[2].decl_dim
.Index2D
= index2D
;
1570 out
[3].array
.ArrayID
= array_id
;
1575 emit_decl_fs(struct ureg_program
*ureg
,
1579 enum tgsi_semantic semantic_name
,
1580 unsigned semantic_index
,
1581 enum tgsi_interpolate_mode interpolate
,
1582 unsigned cylindrical_wrap
,
1583 enum tgsi_interpolate_loc interpolate_location
,
1585 unsigned usage_mask
)
1587 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
,
1591 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1592 out
[0].decl
.NrTokens
= 4;
1593 out
[0].decl
.File
= file
;
1594 out
[0].decl
.UsageMask
= usage_mask
;
1595 out
[0].decl
.Interpolate
= 1;
1596 out
[0].decl
.Semantic
= 1;
1597 out
[0].decl
.Array
= array_id
!= 0;
1600 out
[1].decl_range
.First
= first
;
1601 out
[1].decl_range
.Last
= last
;
1604 out
[2].decl_interp
.Interpolate
= interpolate
;
1605 out
[2].decl_interp
.CylindricalWrap
= cylindrical_wrap
;
1606 out
[2].decl_interp
.Location
= interpolate_location
;
1609 out
[3].decl_semantic
.Name
= semantic_name
;
1610 out
[3].decl_semantic
.Index
= semantic_index
;
1614 out
[4].array
.ArrayID
= array_id
;
1619 emit_decl_temps( struct ureg_program
*ureg
,
1620 unsigned first
, unsigned last
,
1624 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
,
1628 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1629 out
[0].decl
.NrTokens
= 2;
1630 out
[0].decl
.File
= TGSI_FILE_TEMPORARY
;
1631 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1632 out
[0].decl
.Local
= local
;
1635 out
[1].decl_range
.First
= first
;
1636 out
[1].decl_range
.Last
= last
;
1639 out
[0].decl
.Array
= 1;
1641 out
[2].array
.ArrayID
= arrayid
;
1645 static void emit_decl_range( struct ureg_program
*ureg
,
1650 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
1653 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1654 out
[0].decl
.NrTokens
= 2;
1655 out
[0].decl
.File
= file
;
1656 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1657 out
[0].decl
.Semantic
= 0;
1660 out
[1].decl_range
.First
= first
;
1661 out
[1].decl_range
.Last
= first
+ count
- 1;
1665 emit_decl_range2D(struct ureg_program
*ureg
,
1671 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1674 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1675 out
[0].decl
.NrTokens
= 3;
1676 out
[0].decl
.File
= file
;
1677 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1678 out
[0].decl
.Dimension
= 1;
1681 out
[1].decl_range
.First
= first
;
1682 out
[1].decl_range
.Last
= last
;
1685 out
[2].decl_dim
.Index2D
= index2D
;
1689 emit_decl_sampler_view(struct ureg_program
*ureg
,
1691 enum tgsi_texture_type target
,
1692 enum tgsi_return_type return_type_x
,
1693 enum tgsi_return_type return_type_y
,
1694 enum tgsi_return_type return_type_z
,
1695 enum tgsi_return_type return_type_w
)
1697 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1700 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1701 out
[0].decl
.NrTokens
= 3;
1702 out
[0].decl
.File
= TGSI_FILE_SAMPLER_VIEW
;
1703 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1706 out
[1].decl_range
.First
= index
;
1707 out
[1].decl_range
.Last
= index
;
1710 out
[2].decl_sampler_view
.Resource
= target
;
1711 out
[2].decl_sampler_view
.ReturnTypeX
= return_type_x
;
1712 out
[2].decl_sampler_view
.ReturnTypeY
= return_type_y
;
1713 out
[2].decl_sampler_view
.ReturnTypeZ
= return_type_z
;
1714 out
[2].decl_sampler_view
.ReturnTypeW
= return_type_w
;
1718 emit_decl_image(struct ureg_program
*ureg
,
1720 enum tgsi_texture_type target
,
1721 enum pipe_format format
,
1725 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 3);
1728 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1729 out
[0].decl
.NrTokens
= 3;
1730 out
[0].decl
.File
= TGSI_FILE_IMAGE
;
1731 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1734 out
[1].decl_range
.First
= index
;
1735 out
[1].decl_range
.Last
= index
;
1738 out
[2].decl_image
.Resource
= target
;
1739 out
[2].decl_image
.Writable
= wr
;
1740 out
[2].decl_image
.Raw
= raw
;
1741 out
[2].decl_image
.Format
= format
;
1745 emit_decl_buffer(struct ureg_program
*ureg
,
1749 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1752 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1753 out
[0].decl
.NrTokens
= 2;
1754 out
[0].decl
.File
= TGSI_FILE_BUFFER
;
1755 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1756 out
[0].decl
.Atomic
= atomic
;
1759 out
[1].decl_range
.First
= index
;
1760 out
[1].decl_range
.Last
= index
;
1764 emit_decl_memory(struct ureg_program
*ureg
, unsigned memory_type
)
1766 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1769 out
[0].decl
.Type
= TGSI_TOKEN_TYPE_DECLARATION
;
1770 out
[0].decl
.NrTokens
= 2;
1771 out
[0].decl
.File
= TGSI_FILE_MEMORY
;
1772 out
[0].decl
.UsageMask
= TGSI_WRITEMASK_XYZW
;
1773 out
[0].decl
.MemType
= memory_type
;
1776 out
[1].decl_range
.First
= memory_type
;
1777 out
[1].decl_range
.Last
= memory_type
;
1781 emit_immediate( struct ureg_program
*ureg
,
1785 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 5 );
1788 out
[0].imm
.Type
= TGSI_TOKEN_TYPE_IMMEDIATE
;
1789 out
[0].imm
.NrTokens
= 5;
1790 out
[0].imm
.DataType
= type
;
1791 out
[0].imm
.Padding
= 0;
1793 out
[1].imm_data
.Uint
= v
[0];
1794 out
[2].imm_data
.Uint
= v
[1];
1795 out
[3].imm_data
.Uint
= v
[2];
1796 out
[4].imm_data
.Uint
= v
[3];
1800 emit_property(struct ureg_program
*ureg
,
1804 union tgsi_any_token
*out
= get_tokens(ureg
, DOMAIN_DECL
, 2);
1807 out
[0].prop
.Type
= TGSI_TOKEN_TYPE_PROPERTY
;
1808 out
[0].prop
.NrTokens
= 2;
1809 out
[0].prop
.PropertyName
= name
;
1811 out
[1].prop_data
.Data
= data
;
1815 static void emit_decls( struct ureg_program
*ureg
)
1819 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
1820 if (ureg
->properties
[i
] != ~0u)
1821 emit_property(ureg
, i
, ureg
->properties
[i
]);
1823 if (ureg
->processor
== PIPE_SHADER_VERTEX
) {
1824 for (i
= 0; i
< PIPE_MAX_ATTRIBS
; i
++) {
1825 if (ureg
->vs_inputs
[i
/32] & (1u << (i
%32))) {
1826 emit_decl_range( ureg
, TGSI_FILE_INPUT
, i
, 1 );
1829 } else if (ureg
->processor
== PIPE_SHADER_FRAGMENT
) {
1830 if (ureg
->supports_any_inout_decl_range
) {
1831 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1834 ureg
->input
[i
].first
,
1835 ureg
->input
[i
].last
,
1836 ureg
->input
[i
].semantic_name
,
1837 ureg
->input
[i
].semantic_index
,
1838 ureg
->input
[i
].interp
,
1839 ureg
->input
[i
].cylindrical_wrap
,
1840 ureg
->input
[i
].interp_location
,
1841 ureg
->input
[i
].array_id
,
1842 ureg
->input
[i
].usage_mask
);
1846 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1847 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1851 ureg
->input
[i
].semantic_name
,
1852 ureg
->input
[i
].semantic_index
+
1853 (j
- ureg
->input
[i
].first
),
1854 ureg
->input
[i
].interp
,
1855 ureg
->input
[i
].cylindrical_wrap
,
1856 ureg
->input
[i
].interp_location
, 0,
1857 ureg
->input
[i
].usage_mask
);
1862 if (ureg
->supports_any_inout_decl_range
) {
1863 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1864 emit_decl_semantic(ureg
,
1866 ureg
->input
[i
].first
,
1867 ureg
->input
[i
].last
,
1868 ureg
->input
[i
].semantic_name
,
1869 ureg
->input
[i
].semantic_index
,
1871 TGSI_WRITEMASK_XYZW
,
1872 ureg
->input
[i
].array_id
);
1876 for (i
= 0; i
< ureg
->nr_inputs
; i
++) {
1877 for (j
= ureg
->input
[i
].first
; j
<= ureg
->input
[i
].last
; j
++) {
1878 emit_decl_semantic(ureg
,
1881 ureg
->input
[i
].semantic_name
,
1882 ureg
->input
[i
].semantic_index
+
1883 (j
- ureg
->input
[i
].first
),
1885 TGSI_WRITEMASK_XYZW
, 0);
1891 for (i
= 0; i
< ureg
->nr_system_values
; i
++) {
1892 emit_decl_semantic(ureg
,
1893 TGSI_FILE_SYSTEM_VALUE
,
1896 ureg
->system_value
[i
].semantic_name
,
1897 ureg
->system_value
[i
].semantic_index
,
1899 TGSI_WRITEMASK_XYZW
, 0);
1902 if (ureg
->supports_any_inout_decl_range
) {
1903 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1904 emit_decl_semantic(ureg
,
1906 ureg
->output
[i
].first
,
1907 ureg
->output
[i
].last
,
1908 ureg
->output
[i
].semantic_name
,
1909 ureg
->output
[i
].semantic_index
,
1910 ureg
->output
[i
].streams
,
1911 ureg
->output
[i
].usage_mask
,
1912 ureg
->output
[i
].array_id
);
1916 for (i
= 0; i
< ureg
->nr_outputs
; i
++) {
1917 for (j
= ureg
->output
[i
].first
; j
<= ureg
->output
[i
].last
; j
++) {
1918 emit_decl_semantic(ureg
,
1921 ureg
->output
[i
].semantic_name
,
1922 ureg
->output
[i
].semantic_index
+
1923 (j
- ureg
->output
[i
].first
),
1924 ureg
->output
[i
].streams
,
1925 ureg
->output
[i
].usage_mask
, 0);
1930 for (i
= 0; i
< ureg
->nr_samplers
; i
++) {
1931 emit_decl_range( ureg
,
1933 ureg
->sampler
[i
].Index
, 1 );
1936 for (i
= 0; i
< ureg
->nr_sampler_views
; i
++) {
1937 emit_decl_sampler_view(ureg
,
1938 ureg
->sampler_view
[i
].index
,
1939 ureg
->sampler_view
[i
].target
,
1940 ureg
->sampler_view
[i
].return_type_x
,
1941 ureg
->sampler_view
[i
].return_type_y
,
1942 ureg
->sampler_view
[i
].return_type_z
,
1943 ureg
->sampler_view
[i
].return_type_w
);
1946 for (i
= 0; i
< ureg
->nr_images
; i
++) {
1947 emit_decl_image(ureg
,
1948 ureg
->image
[i
].index
,
1949 ureg
->image
[i
].target
,
1950 ureg
->image
[i
].format
,
1952 ureg
->image
[i
].raw
);
1955 for (i
= 0; i
< ureg
->nr_buffers
; i
++) {
1956 emit_decl_buffer(ureg
, ureg
->buffer
[i
].index
, ureg
->buffer
[i
].atomic
);
1959 for (i
= 0; i
< TGSI_MEMORY_TYPE_COUNT
; i
++) {
1960 if (ureg
->use_memory
[i
])
1961 emit_decl_memory(ureg
, i
);
1964 for (i
= 0; i
< PIPE_MAX_CONSTANT_BUFFERS
; i
++) {
1965 struct const_decl
*decl
= &ureg
->const_decls
[i
];
1967 if (decl
->nr_constant_ranges
) {
1970 for (j
= 0; j
< decl
->nr_constant_ranges
; j
++) {
1971 emit_decl_range2D(ureg
,
1973 decl
->constant_range
[j
].first
,
1974 decl
->constant_range
[j
].last
,
1980 for (i
= 0; i
< PIPE_MAX_HW_ATOMIC_BUFFERS
; i
++) {
1981 struct hw_atomic_decl
*decl
= &ureg
->hw_atomic_decls
[i
];
1983 if (decl
->nr_hw_atomic_ranges
) {
1986 for (j
= 0; j
< decl
->nr_hw_atomic_ranges
; j
++) {
1987 emit_decl_atomic_2d(ureg
,
1988 decl
->hw_atomic_range
[j
].first
,
1989 decl
->hw_atomic_range
[j
].last
,
1991 decl
->hw_atomic_range
[j
].array_id
);
1996 if (ureg
->nr_temps
) {
1998 for (i
= 0; i
< ureg
->nr_temps
;) {
1999 boolean local
= util_bitmask_get(ureg
->local_temps
, i
);
2001 i
= util_bitmask_get_next_index(ureg
->decl_temps
, i
+ 1);
2002 if (i
== UTIL_BITMASK_INVALID_INDEX
)
2005 if (array
< ureg
->nr_array_temps
&& ureg
->array_temps
[array
] == first
)
2006 emit_decl_temps( ureg
, first
, i
- 1, local
, ++array
);
2008 emit_decl_temps( ureg
, first
, i
- 1, local
, 0 );
2012 if (ureg
->nr_addrs
) {
2013 emit_decl_range( ureg
,
2015 0, ureg
->nr_addrs
);
2018 for (i
= 0; i
< ureg
->nr_immediates
; i
++) {
2019 emit_immediate( ureg
,
2020 ureg
->immediate
[i
].value
.u
,
2021 ureg
->immediate
[i
].type
);
2025 /* Append the instruction tokens onto the declarations to build a
2026 * contiguous stream suitable to send to the driver.
2028 static void copy_instructions( struct ureg_program
*ureg
)
2030 unsigned nr_tokens
= ureg
->domain
[DOMAIN_INSN
].count
;
2031 union tgsi_any_token
*out
= get_tokens( ureg
,
2036 ureg
->domain
[DOMAIN_INSN
].tokens
,
2037 nr_tokens
* sizeof out
[0] );
2042 fixup_header_size(struct ureg_program
*ureg
)
2044 union tgsi_any_token
*out
= retrieve_token( ureg
, DOMAIN_DECL
, 0 );
2046 out
->header
.BodySize
= ureg
->domain
[DOMAIN_DECL
].count
- 2;
2051 emit_header( struct ureg_program
*ureg
)
2053 union tgsi_any_token
*out
= get_tokens( ureg
, DOMAIN_DECL
, 2 );
2055 out
[0].header
.HeaderSize
= 2;
2056 out
[0].header
.BodySize
= 0;
2058 out
[1].processor
.Processor
= ureg
->processor
;
2059 out
[1].processor
.Padding
= 0;
2063 const struct tgsi_token
*ureg_finalize( struct ureg_program
*ureg
)
2065 const struct tgsi_token
*tokens
;
2067 switch (ureg
->processor
) {
2068 case PIPE_SHADER_VERTEX
:
2069 case PIPE_SHADER_TESS_EVAL
:
2070 ureg_property(ureg
, TGSI_PROPERTY_NEXT_SHADER
,
2071 ureg
->next_shader_processor
== -1 ?
2072 PIPE_SHADER_FRAGMENT
:
2073 ureg
->next_shader_processor
);
2079 emit_header( ureg
);
2081 copy_instructions( ureg
);
2082 fixup_header_size( ureg
);
2084 if (ureg
->domain
[0].tokens
== error_tokens
||
2085 ureg
->domain
[1].tokens
== error_tokens
) {
2086 debug_printf("%s: error in generated shader\n", __FUNCTION__
);
2091 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2094 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__
,
2095 ureg
->domain
[DOMAIN_DECL
].count
);
2096 tgsi_dump( tokens
, 0 );
2100 if (tokens
&& !tgsi_sanity_check(tokens
)) {
2101 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2102 tgsi_dump(tokens
, 0);
2112 void *ureg_create_shader( struct ureg_program
*ureg
,
2113 struct pipe_context
*pipe
,
2114 const struct pipe_stream_output_info
*so
)
2116 struct pipe_shader_state state
;
2118 pipe_shader_state_from_tgsi(&state
, ureg_finalize(ureg
));
2123 state
.stream_output
= *so
;
2125 switch (ureg
->processor
) {
2126 case PIPE_SHADER_VERTEX
:
2127 return pipe
->create_vs_state(pipe
, &state
);
2128 case PIPE_SHADER_TESS_CTRL
:
2129 return pipe
->create_tcs_state(pipe
, &state
);
2130 case PIPE_SHADER_TESS_EVAL
:
2131 return pipe
->create_tes_state(pipe
, &state
);
2132 case PIPE_SHADER_GEOMETRY
:
2133 return pipe
->create_gs_state(pipe
, &state
);
2134 case PIPE_SHADER_FRAGMENT
:
2135 return pipe
->create_fs_state(pipe
, &state
);
2142 const struct tgsi_token
*ureg_get_tokens( struct ureg_program
*ureg
,
2143 unsigned *nr_tokens
)
2145 const struct tgsi_token
*tokens
;
2147 ureg_finalize(ureg
);
2149 tokens
= &ureg
->domain
[DOMAIN_DECL
].tokens
[0].token
;
2152 *nr_tokens
= ureg
->domain
[DOMAIN_DECL
].count
;
2154 ureg
->domain
[DOMAIN_DECL
].tokens
= 0;
2155 ureg
->domain
[DOMAIN_DECL
].size
= 0;
2156 ureg
->domain
[DOMAIN_DECL
].order
= 0;
2157 ureg
->domain
[DOMAIN_DECL
].count
= 0;
/* Release a token stream previously returned by ureg_get_tokens(). */
void ureg_free_tokens(const struct tgsi_token *tokens)
{
   FREE((struct tgsi_token *)tokens);
}
2169 struct ureg_program
*
2170 ureg_create(enum pipe_shader_type processor
)
2172 return ureg_create_with_screen(processor
, NULL
);
2176 struct ureg_program
*
2177 ureg_create_with_screen(enum pipe_shader_type processor
,
2178 struct pipe_screen
*screen
)
2181 struct ureg_program
*ureg
= CALLOC_STRUCT( ureg_program
);
2185 ureg
->processor
= processor
;
2186 ureg
->supports_any_inout_decl_range
=
2188 screen
->get_shader_param(screen
, processor
,
2189 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE
) != 0;
2190 ureg
->next_shader_processor
= -1;
2192 for (i
= 0; i
< ARRAY_SIZE(ureg
->properties
); i
++)
2193 ureg
->properties
[i
] = ~0;
2195 ureg
->free_temps
= util_bitmask_create();
2196 if (ureg
->free_temps
== NULL
)
2199 ureg
->local_temps
= util_bitmask_create();
2200 if (ureg
->local_temps
== NULL
)
2201 goto no_local_temps
;
2203 ureg
->decl_temps
= util_bitmask_create();
2204 if (ureg
->decl_temps
== NULL
)
2210 util_bitmask_destroy(ureg
->local_temps
);
2212 util_bitmask_destroy(ureg
->free_temps
);
2221 ureg_set_next_shader_processor(struct ureg_program
*ureg
, unsigned processor
)
2223 ureg
->next_shader_processor
= processor
;
2228 ureg_get_nr_outputs( const struct ureg_program
*ureg
)
2232 return ureg
->nr_outputs
;
2236 void ureg_destroy( struct ureg_program
*ureg
)
2240 for (i
= 0; i
< ARRAY_SIZE(ureg
->domain
); i
++) {
2241 if (ureg
->domain
[i
].tokens
&&
2242 ureg
->domain
[i
].tokens
!= error_tokens
)
2243 FREE(ureg
->domain
[i
].tokens
);
2246 util_bitmask_destroy(ureg
->free_temps
);
2247 util_bitmask_destroy(ureg
->local_temps
);
2248 util_bitmask_destroy(ureg
->decl_temps
);