tgsi: use TGSI enum types in ureg code
[mesa.git] / src / gallium / auxiliary / tgsi / tgsi_ureg.c
1 /**************************************************************************
2 *
3 * Copyright 2009-2010 VMware, Inc.
4 * All Rights Reserved.
5 *
6 * Permission is hereby granted, free of charge, to any person obtaining a
7 * copy of this software and associated documentation files (the
8 * "Software"), to deal in the Software without restriction, including
9 * without limitation the rights to use, copy, modify, merge, publish,
10 * distribute, sub license, and/or sell copies of the Software, and to
11 * permit persons to whom the Software is furnished to do so, subject to
12 * the following conditions:
13 *
14 * The above copyright notice and this permission notice (including the
15 * next paragraph) shall be included in all copies or substantial portions
16 * of the Software.
17 *
18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21 * IN NO EVENT SHALL VMWARE, INC AND/OR ITS SUPPLIERS BE LIABLE FOR
22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 *
26 **************************************************************************/
27
28
29 #include "pipe/p_screen.h"
30 #include "pipe/p_context.h"
31 #include "pipe/p_state.h"
32 #include "tgsi/tgsi_ureg.h"
33 #include "tgsi/tgsi_build.h"
34 #include "tgsi/tgsi_info.h"
35 #include "tgsi/tgsi_dump.h"
36 #include "tgsi/tgsi_sanity.h"
37 #include "util/u_debug.h"
38 #include "util/u_inlines.h"
39 #include "util/u_memory.h"
40 #include "util/u_math.h"
41 #include "util/u_bitmask.h"
42
43 union tgsi_any_token {
44 struct tgsi_header header;
45 struct tgsi_processor processor;
46 struct tgsi_token token;
47 struct tgsi_property prop;
48 struct tgsi_property_data prop_data;
49 struct tgsi_declaration decl;
50 struct tgsi_declaration_range decl_range;
51 struct tgsi_declaration_dimension decl_dim;
52 struct tgsi_declaration_interp decl_interp;
53 struct tgsi_declaration_image decl_image;
54 struct tgsi_declaration_semantic decl_semantic;
55 struct tgsi_declaration_sampler_view decl_sampler_view;
56 struct tgsi_declaration_array array;
57 struct tgsi_immediate imm;
58 union tgsi_immediate_data imm_data;
59 struct tgsi_instruction insn;
60 struct tgsi_instruction_label insn_label;
61 struct tgsi_instruction_texture insn_texture;
62 struct tgsi_instruction_memory insn_memory;
63 struct tgsi_texture_offset insn_texture_offset;
64 struct tgsi_src_register src;
65 struct tgsi_ind_register ind;
66 struct tgsi_dimension dim;
67 struct tgsi_dst_register dst;
68 unsigned value;
69 };
70
71
72 struct ureg_tokens {
73 union tgsi_any_token *tokens;
74 unsigned size;
75 unsigned order;
76 unsigned count;
77 };
78
79 #define UREG_MAX_INPUT (4 * PIPE_MAX_SHADER_INPUTS)
80 #define UREG_MAX_SYSTEM_VALUE PIPE_MAX_ATTRIBS
81 #define UREG_MAX_OUTPUT (4 * PIPE_MAX_SHADER_OUTPUTS)
82 #define UREG_MAX_CONSTANT_RANGE 32
83 #define UREG_MAX_HW_ATOMIC_RANGE 32
84 #define UREG_MAX_IMMEDIATE 4096
85 #define UREG_MAX_ADDR 3
86 #define UREG_MAX_ARRAY_TEMPS 256
87
88 struct const_decl {
89 struct {
90 unsigned first;
91 unsigned last;
92 } constant_range[UREG_MAX_CONSTANT_RANGE];
93 unsigned nr_constant_ranges;
94 };
95
96 struct hw_atomic_decl {
97 struct {
98 unsigned first;
99 unsigned last;
100 unsigned array_id;
101 } hw_atomic_range[UREG_MAX_HW_ATOMIC_RANGE];
102 unsigned nr_hw_atomic_ranges;
103 };
104
105 #define DOMAIN_DECL 0
106 #define DOMAIN_INSN 1
107
108 struct ureg_program
109 {
110 unsigned processor;
111 bool supports_any_inout_decl_range;
112 int next_shader_processor;
113
114 struct {
115 unsigned semantic_name;
116 unsigned semantic_index;
117 enum tgsi_interpolate_mode interp;
118 unsigned char cylindrical_wrap;
119 unsigned char usage_mask;
120 enum tgsi_interpolate_loc interp_location;
121 unsigned first;
122 unsigned last;
123 unsigned array_id;
124 } input[UREG_MAX_INPUT];
125 unsigned nr_inputs, nr_input_regs;
126
127 unsigned vs_inputs[PIPE_MAX_ATTRIBS/32];
128
129 struct {
130 unsigned semantic_name;
131 unsigned semantic_index;
132 } system_value[UREG_MAX_SYSTEM_VALUE];
133 unsigned nr_system_values;
134
135 struct {
136 unsigned semantic_name;
137 unsigned semantic_index;
138 unsigned streams;
139 unsigned usage_mask; /* = TGSI_WRITEMASK_* */
140 unsigned first;
141 unsigned last;
142 unsigned array_id;
143 } output[UREG_MAX_OUTPUT];
144 unsigned nr_outputs, nr_output_regs;
145
146 struct {
147 union {
148 float f[4];
149 unsigned u[4];
150 int i[4];
151 } value;
152 unsigned nr;
153 unsigned type;
154 } immediate[UREG_MAX_IMMEDIATE];
155 unsigned nr_immediates;
156
157 struct ureg_src sampler[PIPE_MAX_SAMPLERS];
158 unsigned nr_samplers;
159
160 struct {
161 unsigned index;
162 enum tgsi_texture_type target;
163 enum tgsi_return_type return_type_x;
164 enum tgsi_return_type return_type_y;
165 enum tgsi_return_type return_type_z;
166 enum tgsi_return_type return_type_w;
167 } sampler_view[PIPE_MAX_SHADER_SAMPLER_VIEWS];
168 unsigned nr_sampler_views;
169
170 struct {
171 unsigned index;
172 enum tgsi_texture_type target;
173 unsigned format;
174 boolean wr;
175 boolean raw;
176 } image[PIPE_MAX_SHADER_IMAGES];
177 unsigned nr_images;
178
179 struct {
180 unsigned index;
181 bool atomic;
182 } buffer[PIPE_MAX_SHADER_BUFFERS];
183 unsigned nr_buffers;
184
185 struct util_bitmask *free_temps;
186 struct util_bitmask *local_temps;
187 struct util_bitmask *decl_temps;
188 unsigned nr_temps;
189
190 unsigned array_temps[UREG_MAX_ARRAY_TEMPS];
191 unsigned nr_array_temps;
192
193 struct const_decl const_decls[PIPE_MAX_CONSTANT_BUFFERS];
194
195 struct hw_atomic_decl hw_atomic_decls[PIPE_MAX_HW_ATOMIC_BUFFERS];
196
197 unsigned properties[TGSI_PROPERTY_COUNT];
198
199 unsigned nr_addrs;
200 unsigned nr_instructions;
201
202 struct ureg_tokens domain[2];
203
204 bool use_memory[TGSI_MEMORY_TYPE_COUNT];
205 };
206
207 static union tgsi_any_token error_tokens[32];
208
209 static void tokens_error( struct ureg_tokens *tokens )
210 {
211 if (tokens->tokens && tokens->tokens != error_tokens)
212 FREE(tokens->tokens);
213
214 tokens->tokens = error_tokens;
215 tokens->size = ARRAY_SIZE(error_tokens);
216 tokens->count = 0;
217 }
218
219
220 static void tokens_expand( struct ureg_tokens *tokens,
221 unsigned count )
222 {
223 unsigned old_size = tokens->size * sizeof(unsigned);
224
225 if (tokens->tokens == error_tokens) {
226 return;
227 }
228
229 while (tokens->count + count > tokens->size) {
230 tokens->size = (1 << ++tokens->order);
231 }
232
233 tokens->tokens = REALLOC(tokens->tokens,
234 old_size,
235 tokens->size * sizeof(unsigned));
236 if (tokens->tokens == NULL) {
237 tokens_error(tokens);
238 }
239 }
240
241 static void set_bad( struct ureg_program *ureg )
242 {
243 tokens_error(&ureg->domain[0]);
244 }
245
246
247
248 static union tgsi_any_token *get_tokens( struct ureg_program *ureg,
249 unsigned domain,
250 unsigned count )
251 {
252 struct ureg_tokens *tokens = &ureg->domain[domain];
253 union tgsi_any_token *result;
254
255 if (tokens->count + count > tokens->size)
256 tokens_expand(tokens, count);
257
258 result = &tokens->tokens[tokens->count];
259 tokens->count += count;
260 return result;
261 }
262
263
264 static union tgsi_any_token *retrieve_token( struct ureg_program *ureg,
265 unsigned domain,
266 unsigned nr )
267 {
268 if (ureg->domain[domain].tokens == error_tokens)
269 return &error_tokens[0];
270
271 return &ureg->domain[domain].tokens[nr];
272 }
273
274
275 void
276 ureg_property(struct ureg_program *ureg, unsigned name, unsigned value)
277 {
278 assert(name < ARRAY_SIZE(ureg->properties));
279 ureg->properties[name] = value;
280 }
281
282 struct ureg_src
283 ureg_DECL_fs_input_cyl_centroid_layout(struct ureg_program *ureg,
284 unsigned semantic_name,
285 unsigned semantic_index,
286 enum tgsi_interpolate_mode interp_mode,
287 unsigned cylindrical_wrap,
288 enum tgsi_interpolate_loc interp_location,
289 unsigned index,
290 unsigned usage_mask,
291 unsigned array_id,
292 unsigned array_size)
293 {
294 unsigned i;
295
296 assert(usage_mask != 0);
297 assert(usage_mask <= TGSI_WRITEMASK_XYZW);
298
299 for (i = 0; i < ureg->nr_inputs; i++) {
300 if (ureg->input[i].semantic_name == semantic_name &&
301 ureg->input[i].semantic_index == semantic_index) {
302 assert(ureg->input[i].interp == interp_mode);
303 assert(ureg->input[i].cylindrical_wrap == cylindrical_wrap);
304 assert(ureg->input[i].interp_location == interp_location);
305 if (ureg->input[i].array_id == array_id) {
306 ureg->input[i].usage_mask |= usage_mask;
307 goto out;
308 }
309 assert((ureg->input[i].usage_mask & usage_mask) == 0);
310 }
311 }
312
313 if (ureg->nr_inputs < UREG_MAX_INPUT) {
314 assert(array_size >= 1);
315 ureg->input[i].semantic_name = semantic_name;
316 ureg->input[i].semantic_index = semantic_index;
317 ureg->input[i].interp = interp_mode;
318 ureg->input[i].cylindrical_wrap = cylindrical_wrap;
319 ureg->input[i].interp_location = interp_location;
320 ureg->input[i].first = index;
321 ureg->input[i].last = index + array_size - 1;
322 ureg->input[i].array_id = array_id;
323 ureg->input[i].usage_mask = usage_mask;
324 ureg->nr_input_regs = MAX2(ureg->nr_input_regs, index + array_size);
325 ureg->nr_inputs++;
326    } else {
327       set_bad(ureg);
          i = 0;
328    }
329
330 out:
331 return ureg_src_array_register(TGSI_FILE_INPUT, ureg->input[i].first,
332 array_id);
333 }
334
335 struct ureg_src
336 ureg_DECL_fs_input_cyl_centroid(struct ureg_program *ureg,
337 unsigned semantic_name,
338 unsigned semantic_index,
339 enum tgsi_interpolate_mode interp_mode,
340 unsigned cylindrical_wrap,
341 enum tgsi_interpolate_loc interp_location,
342 unsigned array_id,
343 unsigned array_size)
344 {
345 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
346 semantic_name, semantic_index, interp_mode,
347 cylindrical_wrap, interp_location,
348 ureg->nr_input_regs, TGSI_WRITEMASK_XYZW, array_id, array_size);
349 }
350
351
352 struct ureg_src
353 ureg_DECL_vs_input( struct ureg_program *ureg,
354 unsigned index )
355 {
356 assert(ureg->processor == PIPE_SHADER_VERTEX);
357 assert(index / 32 < ARRAY_SIZE(ureg->vs_inputs));
358
359 ureg->vs_inputs[index/32] |= 1 << (index % 32);
360 return ureg_src_register( TGSI_FILE_INPUT, index );
361 }
362
363
364 struct ureg_src
365 ureg_DECL_input_layout(struct ureg_program *ureg,
366 unsigned semantic_name,
367 unsigned semantic_index,
368 unsigned index,
369 unsigned usage_mask,
370 unsigned array_id,
371 unsigned array_size)
372 {
373 return ureg_DECL_fs_input_cyl_centroid_layout(ureg,
374 semantic_name, semantic_index, 0, 0, 0,
375 index, usage_mask, array_id, array_size);
376 }
377
378
379 struct ureg_src
380 ureg_DECL_input(struct ureg_program *ureg,
381 unsigned semantic_name,
382 unsigned semantic_index,
383 unsigned array_id,
384 unsigned array_size)
385 {
386 return ureg_DECL_fs_input_cyl_centroid(ureg, semantic_name, semantic_index,
387 0, 0, 0, array_id, array_size);
388 }
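/* Illustrative usage of the input declarations above -- a minimal sketch,
 * assuming a valid "struct ureg_program *ureg" for the right shader stage;
 * ureg_DECL_fs_input() is the convenience wrapper declared in tgsi_ureg.h:
 *
 *    // Fragment shader: a perspective-interpolated color input.
 *    struct ureg_src color =
 *       ureg_DECL_fs_input(ureg, TGSI_SEMANTIC_COLOR, 0,
 *                          TGSI_INTERPOLATE_PERSPECTIVE);
 *
 *    // Non-FS stage: a single generic input register (array_id 0, size 1).
 *    struct ureg_src tex =
 *       ureg_DECL_input(ureg, TGSI_SEMANTIC_GENERIC, 0, 0, 1);
 *
 * Declaring the same semantic twice returns the existing register and just
 * widens its usage mask.
 */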
389
390
391 struct ureg_src
392 ureg_DECL_system_value(struct ureg_program *ureg,
393 unsigned semantic_name,
394 unsigned semantic_index)
395 {
396 unsigned i;
397
398 for (i = 0; i < ureg->nr_system_values; i++) {
399 if (ureg->system_value[i].semantic_name == semantic_name &&
400 ureg->system_value[i].semantic_index == semantic_index) {
401 goto out;
402 }
403 }
404
405 if (ureg->nr_system_values < UREG_MAX_SYSTEM_VALUE) {
406 ureg->system_value[ureg->nr_system_values].semantic_name = semantic_name;
407 ureg->system_value[ureg->nr_system_values].semantic_index = semantic_index;
408 i = ureg->nr_system_values;
409 ureg->nr_system_values++;
410 } else {
411 set_bad(ureg);
412 }
413
414 out:
415 return ureg_src_register(TGSI_FILE_SYSTEM_VALUE, i);
416 }
417
418
419 struct ureg_dst
420 ureg_DECL_output_layout(struct ureg_program *ureg,
421 unsigned semantic_name,
422 unsigned semantic_index,
423 unsigned streams,
424 unsigned index,
425 unsigned usage_mask,
426 unsigned array_id,
427 unsigned array_size)
428 {
429 unsigned i;
430
431 assert(usage_mask != 0);
432 assert(!(streams & 0x03) || (usage_mask & 1));
433 assert(!(streams & 0x0c) || (usage_mask & 2));
434 assert(!(streams & 0x30) || (usage_mask & 4));
435 assert(!(streams & 0xc0) || (usage_mask & 8));
436
437 for (i = 0; i < ureg->nr_outputs; i++) {
438 if (ureg->output[i].semantic_name == semantic_name &&
439 ureg->output[i].semantic_index == semantic_index) {
440 if (ureg->output[i].array_id == array_id) {
441 ureg->output[i].usage_mask |= usage_mask;
442 goto out;
443 }
444 assert((ureg->output[i].usage_mask & usage_mask) == 0);
445 }
446 }
447
448 if (ureg->nr_outputs < UREG_MAX_OUTPUT) {
449 ureg->output[i].semantic_name = semantic_name;
450 ureg->output[i].semantic_index = semantic_index;
451 ureg->output[i].usage_mask = usage_mask;
452 ureg->output[i].first = index;
453 ureg->output[i].last = index + array_size - 1;
454 ureg->output[i].array_id = array_id;
455 ureg->nr_output_regs = MAX2(ureg->nr_output_regs, index + array_size);
456 ureg->nr_outputs++;
457 }
458 else {
459 set_bad( ureg );
460 i = 0;
461 }
462
463 out:
464 ureg->output[i].streams |= streams;
465
466 return ureg_dst_array_register(TGSI_FILE_OUTPUT, ureg->output[i].first,
467 array_id);
468 }
469
470
471 struct ureg_dst
472 ureg_DECL_output_masked(struct ureg_program *ureg,
473 unsigned name,
474 unsigned index,
475 unsigned usage_mask,
476 unsigned array_id,
477 unsigned array_size)
478 {
479 return ureg_DECL_output_layout(ureg, name, index, 0,
480 ureg->nr_output_regs, usage_mask, array_id, array_size);
481 }
482
483
484 struct ureg_dst
485 ureg_DECL_output(struct ureg_program *ureg,
486 unsigned name,
487 unsigned index)
488 {
489 return ureg_DECL_output_masked(ureg, name, index, TGSI_WRITEMASK_XYZW,
490 0, 1);
491 }
492
493 struct ureg_dst
494 ureg_DECL_output_array(struct ureg_program *ureg,
495 unsigned semantic_name,
496 unsigned semantic_index,
497 unsigned array_id,
498 unsigned array_size)
499 {
500 return ureg_DECL_output_masked(ureg, semantic_name, semantic_index,
501 TGSI_WRITEMASK_XYZW,
502 array_id, array_size);
503 }
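/* A minimal usage sketch for the output declarations above, assuming a
 * valid "struct ureg_program *ureg":
 *
 *    // Vertex position output, all four components written.
 *    struct ureg_dst pos = ureg_DECL_output(ureg, TGSI_SEMANTIC_POSITION, 0);
 *
 *    // Only .x of this generic output is written, so narrow the declaration.
 *    struct ureg_dst fog =
 *       ureg_DECL_output_masked(ureg, TGSI_SEMANTIC_GENERIC, 1,
 *                               TGSI_WRITEMASK_X, 0, 1);
 */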
504
505
506 /* Declare a constant range in a 2D constant buffer.  Keep track of which have been
507 * referred to so that we can emit decls later.
508 *
509 * Constant operands declared with this function must be addressed
510 * with a two-dimensional index.
511 *
512 * There is nothing in this code to bind this constant to any tracked
513 * value or manage any constant_buffer contents -- that's the
514  * responsibility of the calling code.
515 */
516 void
517 ureg_DECL_constant2D(struct ureg_program *ureg,
518 unsigned first,
519 unsigned last,
520 unsigned index2D)
521 {
522 struct const_decl *decl = &ureg->const_decls[index2D];
523
524 assert(index2D < PIPE_MAX_CONSTANT_BUFFERS);
525
526 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
527 uint i = decl->nr_constant_ranges++;
528
529 decl->constant_range[i].first = first;
530 decl->constant_range[i].last = last;
531 }
532 }
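/* A minimal sketch of how such a constant is then addressed, assuming a
 * valid "struct ureg_program *ureg".  The second dimension selects the
 * constant buffer, so the operand needs ureg_src_dimension():
 *
 *    ureg_DECL_constant2D(ureg, 0, 3, 1);        // CONST[1][0..3]
 *    struct ureg_src c0 =
 *       ureg_src_dimension(ureg_src_register(TGSI_FILE_CONSTANT, 0), 1);
 */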
533
534
535 /* A one-dimensional, deprecated version of ureg_DECL_constant2D().
536 *
537 * Constant operands declared with this function must be addressed
538 * with a one-dimensional index.
539 */
540 struct ureg_src
541 ureg_DECL_constant(struct ureg_program *ureg,
542 unsigned index)
543 {
544 struct const_decl *decl = &ureg->const_decls[0];
545 unsigned minconst = index, maxconst = index;
546 unsigned i;
547
548 /* Inside existing range?
549 */
550 for (i = 0; i < decl->nr_constant_ranges; i++) {
551 if (decl->constant_range[i].first <= index &&
552 decl->constant_range[i].last >= index) {
553 goto out;
554 }
555 }
556
557 /* Extend existing range?
558 */
559 for (i = 0; i < decl->nr_constant_ranges; i++) {
560 if (decl->constant_range[i].last == index - 1) {
561 decl->constant_range[i].last = index;
562 goto out;
563 }
564
565 if (decl->constant_range[i].first == index + 1) {
566 decl->constant_range[i].first = index;
567 goto out;
568 }
569
570 minconst = MIN2(minconst, decl->constant_range[i].first);
571 maxconst = MAX2(maxconst, decl->constant_range[i].last);
572 }
573
574 /* Create new range?
575 */
576 if (decl->nr_constant_ranges < UREG_MAX_CONSTANT_RANGE) {
577 i = decl->nr_constant_ranges++;
578 decl->constant_range[i].first = index;
579 decl->constant_range[i].last = index;
580 goto out;
581 }
582
583 /* Collapse all ranges down to one:
584 */
585 i = 0;
586 decl->constant_range[0].first = minconst;
587 decl->constant_range[0].last = maxconst;
588 decl->nr_constant_ranges = 1;
589
590 out:
591 assert(i < decl->nr_constant_ranges);
592 assert(decl->constant_range[i].first <= index);
593 assert(decl->constant_range[i].last >= index);
594
595 struct ureg_src src = ureg_src_register(TGSI_FILE_CONSTANT, index);
596 return ureg_src_dimension(src, 0);
597 }
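/* Worked example of the range handling above (hypothetical indices): with
 * existing ranges [0..3] and [8..9], declaring constant 4 extends the first
 * range to [0..4], declaring constant 6 starts a new range [6..6], and once
 * UREG_MAX_CONSTANT_RANGE ranges exist everything is collapsed into a single
 * [minconst..maxconst] range, so the declaration always succeeds.
 */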
598
599
600 /* Declare a range of hw atomic registers.  Keep track of which have been
601 * referred to so that we can emit decls later.
602 */
603 void
604 ureg_DECL_hw_atomic(struct ureg_program *ureg,
605 unsigned first,
606 unsigned last,
607 unsigned buffer_id,
608 unsigned array_id)
609 {
610 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[buffer_id];
611
612 if (decl->nr_hw_atomic_ranges < UREG_MAX_HW_ATOMIC_RANGE) {
613 uint i = decl->nr_hw_atomic_ranges++;
614
615 decl->hw_atomic_range[i].first = first;
616 decl->hw_atomic_range[i].last = last;
617 decl->hw_atomic_range[i].array_id = array_id;
618 } else {
619 set_bad(ureg);
620 }
621 }
622
623 static struct ureg_dst alloc_temporary( struct ureg_program *ureg,
624 boolean local )
625 {
626 unsigned i;
627
628 /* Look for a released temporary.
629 */
630 for (i = util_bitmask_get_first_index(ureg->free_temps);
631 i != UTIL_BITMASK_INVALID_INDEX;
632 i = util_bitmask_get_next_index(ureg->free_temps, i + 1)) {
633 if (util_bitmask_get(ureg->local_temps, i) == local)
634 break;
635 }
636
637 /* Or allocate a new one.
638 */
639 if (i == UTIL_BITMASK_INVALID_INDEX) {
640 i = ureg->nr_temps++;
641
642 if (local)
643 util_bitmask_set(ureg->local_temps, i);
644
645 /* Start a new declaration when the local flag changes */
646 if (!i || util_bitmask_get(ureg->local_temps, i - 1) != local)
647 util_bitmask_set(ureg->decl_temps, i);
648 }
649
650 util_bitmask_clear(ureg->free_temps, i);
651
652 return ureg_dst_register( TGSI_FILE_TEMPORARY, i );
653 }
654
655 struct ureg_dst ureg_DECL_temporary( struct ureg_program *ureg )
656 {
657 return alloc_temporary(ureg, FALSE);
658 }
659
660 struct ureg_dst ureg_DECL_local_temporary( struct ureg_program *ureg )
661 {
662 return alloc_temporary(ureg, TRUE);
663 }
664
665 struct ureg_dst ureg_DECL_array_temporary( struct ureg_program *ureg,
666 unsigned size,
667 boolean local )
668 {
669 unsigned i = ureg->nr_temps;
670 struct ureg_dst dst = ureg_dst_register( TGSI_FILE_TEMPORARY, i );
671
672 if (local)
673 util_bitmask_set(ureg->local_temps, i);
674
675 /* Always start a new declaration at the start */
676 util_bitmask_set(ureg->decl_temps, i);
677
678 ureg->nr_temps += size;
679
680 /* and also at the end of the array */
681 util_bitmask_set(ureg->decl_temps, ureg->nr_temps);
682
683 if (ureg->nr_array_temps < UREG_MAX_ARRAY_TEMPS) {
684 ureg->array_temps[ureg->nr_array_temps++] = i;
685 dst.ArrayID = ureg->nr_array_temps;
686 }
687
688 return dst;
689 }
690
691 void ureg_release_temporary( struct ureg_program *ureg,
692 struct ureg_dst tmp )
693 {
694    if (tmp.File == TGSI_FILE_TEMPORARY)
695 util_bitmask_set(ureg->free_temps, tmp.Index);
696 }
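/* A minimal temporary-register lifecycle, assuming a valid
 * "struct ureg_program *ureg" and operands src_a/src_b/dst built elsewhere.
 * Released temporaries are recycled by alloc_temporary(), so short-lived
 * scratch registers do not inflate nr_temps:
 *
 *    struct ureg_dst tmp = ureg_DECL_temporary(ureg);
 *    ureg_MUL(ureg, tmp, src_a, src_b);
 *    ureg_MOV(ureg, dst, ureg_src(tmp));
 *    ureg_release_temporary(ureg, tmp);
 */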
697
698
699 /* Allocate a new address register.
700 */
701 struct ureg_dst ureg_DECL_address( struct ureg_program *ureg )
702 {
703 if (ureg->nr_addrs < UREG_MAX_ADDR)
704 return ureg_dst_register( TGSI_FILE_ADDRESS, ureg->nr_addrs++ );
705
706 assert( 0 );
707 return ureg_dst_register( TGSI_FILE_ADDRESS, 0 );
708 }
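/* A sketch of indirect constant addressing through an address register,
 * assuming a valid "struct ureg_program *ureg", a declared CONST[] range and
 * a source "index_src" holding the index; ureg_ARL(), ureg_writemask() and
 * ureg_src_indirect() are declared in tgsi_ureg.h:
 *
 *    struct ureg_dst addr = ureg_DECL_address(ureg);
 *    ureg_ARL(ureg, ureg_writemask(addr, TGSI_WRITEMASK_X), index_src);
 *    struct ureg_src elem =
 *       ureg_src_indirect(ureg_src_register(TGSI_FILE_CONSTANT, 0),
 *                         ureg_src(addr));        // CONST[ADDR[0].x + 0]
 */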
709
710 /* Allocate a new sampler.
711 */
712 struct ureg_src ureg_DECL_sampler( struct ureg_program *ureg,
713 unsigned nr )
714 {
715 unsigned i;
716
717 for (i = 0; i < ureg->nr_samplers; i++)
718 if (ureg->sampler[i].Index == nr)
719 return ureg->sampler[i];
720
721 if (i < PIPE_MAX_SAMPLERS) {
722 ureg->sampler[i] = ureg_src_register( TGSI_FILE_SAMPLER, nr );
723 ureg->nr_samplers++;
724 return ureg->sampler[i];
725 }
726
727 assert( 0 );
728 return ureg->sampler[0];
729 }
730
731 /*
732 * Allocate a new shader sampler view.
733 */
734 struct ureg_src
735 ureg_DECL_sampler_view(struct ureg_program *ureg,
736 unsigned index,
737 enum tgsi_texture_type target,
738 enum tgsi_return_type return_type_x,
739 enum tgsi_return_type return_type_y,
740 enum tgsi_return_type return_type_z,
741 enum tgsi_return_type return_type_w)
742 {
743 struct ureg_src reg = ureg_src_register(TGSI_FILE_SAMPLER_VIEW, index);
744 uint i;
745
746 for (i = 0; i < ureg->nr_sampler_views; i++) {
747 if (ureg->sampler_view[i].index == index) {
748 return reg;
749 }
750 }
751
752 if (i < PIPE_MAX_SHADER_SAMPLER_VIEWS) {
753 ureg->sampler_view[i].index = index;
754 ureg->sampler_view[i].target = target;
755 ureg->sampler_view[i].return_type_x = return_type_x;
756 ureg->sampler_view[i].return_type_y = return_type_y;
757 ureg->sampler_view[i].return_type_z = return_type_z;
758 ureg->sampler_view[i].return_type_w = return_type_w;
759 ureg->nr_sampler_views++;
760 return reg;
761 }
762
763 assert(0);
764 return reg;
765 }
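/* A minimal texturing sketch, assuming a valid fragment-shader
 * "struct ureg_program *ureg" and a texture-coordinate source "coord".
 * The sampler and the sampler view are declared with the same unit index:
 *
 *    struct ureg_src sampler = ureg_DECL_sampler(ureg, 0);
 *    ureg_DECL_sampler_view(ureg, 0, TGSI_TEXTURE_2D,
 *                           TGSI_RETURN_TYPE_FLOAT, TGSI_RETURN_TYPE_FLOAT,
 *                           TGSI_RETURN_TYPE_FLOAT, TGSI_RETURN_TYPE_FLOAT);
 *    struct ureg_dst out_color = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *    ureg_TEX(ureg, out_color, TGSI_TEXTURE_2D, coord, sampler);
 */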
766
767 /* Allocate a new image.
768 */
769 struct ureg_src
770 ureg_DECL_image(struct ureg_program *ureg,
771 unsigned index,
772 enum tgsi_texture_type target,
773 unsigned format,
774 boolean wr,
775 boolean raw)
776 {
777 struct ureg_src reg = ureg_src_register(TGSI_FILE_IMAGE, index);
778 unsigned i;
779
780 for (i = 0; i < ureg->nr_images; i++)
781 if (ureg->image[i].index == index)
782 return reg;
783
784 if (i < PIPE_MAX_SHADER_IMAGES) {
785 ureg->image[i].index = index;
786 ureg->image[i].target = target;
787 ureg->image[i].wr = wr;
788 ureg->image[i].raw = raw;
789 ureg->image[i].format = format;
790 ureg->nr_images++;
791 return reg;
792 }
793
794 assert(0);
795 return reg;
796 }
797
798 /* Allocate a new buffer.
799 */
800 struct ureg_src ureg_DECL_buffer(struct ureg_program *ureg, unsigned nr,
801 bool atomic)
802 {
803 struct ureg_src reg = ureg_src_register(TGSI_FILE_BUFFER, nr);
804 unsigned i;
805
806 for (i = 0; i < ureg->nr_buffers; i++)
807 if (ureg->buffer[i].index == nr)
808 return reg;
809
810 if (i < PIPE_MAX_SHADER_BUFFERS) {
811 ureg->buffer[i].index = nr;
812 ureg->buffer[i].atomic = atomic;
813 ureg->nr_buffers++;
814 return reg;
815 }
816
817 assert(0);
818 return reg;
819 }
820
821 /* Allocate a memory area.
822 */
823 struct ureg_src ureg_DECL_memory(struct ureg_program *ureg,
824 unsigned memory_type)
825 {
826 struct ureg_src reg = ureg_src_register(TGSI_FILE_MEMORY, memory_type);
827
828 ureg->use_memory[memory_type] = true;
829 return reg;
830 }
831
832 static int
833 match_or_expand_immediate64( const unsigned *v,
834 int type,
835 unsigned nr,
836 unsigned *v2,
837 unsigned *pnr2,
838 unsigned *swizzle )
839 {
840 unsigned nr2 = *pnr2;
841 unsigned i, j;
842 *swizzle = 0;
843
844 for (i = 0; i < nr; i += 2) {
845 boolean found = FALSE;
846
847 for (j = 0; j < nr2 && !found; j += 2) {
848 if (v[i] == v2[j] && v[i + 1] == v2[j + 1]) {
849 *swizzle |= (j << (i * 2)) | ((j + 1) << ((i + 1) * 2));
850 found = TRUE;
851 }
852 }
853 if (!found) {
854 if ((nr2) >= 4) {
855 return FALSE;
856 }
857
858 v2[nr2] = v[i];
859 v2[nr2 + 1] = v[i + 1];
860
861 *swizzle |= (nr2 << (i * 2)) | ((nr2 + 1) << ((i + 1) * 2));
862 nr2 += 2;
863 }
864 }
865
866    /* Only commit the expanded immediate size (*pnr2) once the whole vector fits.
867 */
868 *pnr2 = nr2;
869 return TRUE;
870 }
871
872 static int
873 match_or_expand_immediate( const unsigned *v,
874 int type,
875 unsigned nr,
876 unsigned *v2,
877 unsigned *pnr2,
878 unsigned *swizzle )
879 {
880 unsigned nr2 = *pnr2;
881 unsigned i, j;
882
883 if (type == TGSI_IMM_FLOAT64 ||
884 type == TGSI_IMM_UINT64 ||
885 type == TGSI_IMM_INT64)
886 return match_or_expand_immediate64(v, type, nr, v2, pnr2, swizzle);
887
888 *swizzle = 0;
889
890 for (i = 0; i < nr; i++) {
891 boolean found = FALSE;
892
893 for (j = 0; j < nr2 && !found; j++) {
894 if (v[i] == v2[j]) {
895 *swizzle |= j << (i * 2);
896 found = TRUE;
897 }
898 }
899
900 if (!found) {
901 if (nr2 >= 4) {
902 return FALSE;
903 }
904
905 v2[nr2] = v[i];
906 *swizzle |= nr2 << (i * 2);
907 nr2++;
908 }
909 }
910
911    /* Only commit the expanded immediate size (*pnr2) once the whole vector fits.
912 */
913 *pnr2 = nr2;
914 return TRUE;
915 }
916
917
918 static struct ureg_src
919 decl_immediate( struct ureg_program *ureg,
920 const unsigned *v,
921 unsigned nr,
922 unsigned type )
923 {
924 unsigned i, j;
925 unsigned swizzle = 0;
926
927 /* Could do a first pass where we examine all existing immediates
928 * without expanding.
929 */
930
931 for (i = 0; i < ureg->nr_immediates; i++) {
932 if (ureg->immediate[i].type != type) {
933 continue;
934 }
935 if (match_or_expand_immediate(v,
936 type,
937 nr,
938 ureg->immediate[i].value.u,
939 &ureg->immediate[i].nr,
940 &swizzle)) {
941 goto out;
942 }
943 }
944
945 if (ureg->nr_immediates < UREG_MAX_IMMEDIATE) {
946 i = ureg->nr_immediates++;
947 ureg->immediate[i].type = type;
948 if (match_or_expand_immediate(v,
949 type,
950 nr,
951 ureg->immediate[i].value.u,
952 &ureg->immediate[i].nr,
953 &swizzle)) {
954 goto out;
955 }
956 }
957
958 set_bad(ureg);
959
960 out:
961    /* Replicate the leading swizzle into any unused trailing components so
962     * that all referenced elements come from this immediate.  This has the
963     * effect of making size-one immediates into scalars.  */
964 if (type == TGSI_IMM_FLOAT64 ||
965 type == TGSI_IMM_UINT64 ||
966 type == TGSI_IMM_INT64) {
967 for (j = nr; j < 4; j+=2) {
968 swizzle |= (swizzle & 0xf) << (j * 2);
969 }
970 } else {
971 for (j = nr; j < 4; j++) {
972 swizzle |= (swizzle & 0x3) << (j * 2);
973 }
974 }
975 return ureg_swizzle(ureg_src_register(TGSI_FILE_IMMEDIATE, i),
976 (swizzle >> 0) & 0x3,
977 (swizzle >> 2) & 0x3,
978 (swizzle >> 4) & 0x3,
979 (swizzle >> 6) & 0x3);
980 }
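/* Worked example of the packing above (hypothetical values): declaring
 * { 1.0 } and then { 0.5, 1.0 } as FLOAT32 immediates yields one shared
 * IMM[0] = { 1.0, 0.5, .., .. }.  The first declaration comes back as
 * IMM[0].xxxx and the second as IMM[0].yxyy, since unused trailing
 * components are filled with the leading swizzle.
 */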
981
982
983 struct ureg_src
984 ureg_DECL_immediate( struct ureg_program *ureg,
985 const float *v,
986 unsigned nr )
987 {
988 union {
989 float f[4];
990 unsigned u[4];
991 } fu;
992 unsigned int i;
993
994 for (i = 0; i < nr; i++) {
995 fu.f[i] = v[i];
996 }
997
998 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT32);
999 }
1000
1001 struct ureg_src
1002 ureg_DECL_immediate_f64( struct ureg_program *ureg,
1003 const double *v,
1004 unsigned nr )
1005 {
1006 union {
1007 unsigned u[4];
1008 double d[2];
1009 } fu;
1010 unsigned int i;
1011
1012 assert((nr / 2) < 3);
1013 for (i = 0; i < nr / 2; i++) {
1014 fu.d[i] = v[i];
1015 }
1016
1017 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_FLOAT64);
1018 }
1019
1020 struct ureg_src
1021 ureg_DECL_immediate_uint( struct ureg_program *ureg,
1022 const unsigned *v,
1023 unsigned nr )
1024 {
1025 return decl_immediate(ureg, v, nr, TGSI_IMM_UINT32);
1026 }
1027
1028
1029 struct ureg_src
1030 ureg_DECL_immediate_block_uint( struct ureg_program *ureg,
1031 const unsigned *v,
1032 unsigned nr )
1033 {
1034 uint index;
1035 uint i;
1036
1037 if (ureg->nr_immediates + (nr + 3) / 4 > UREG_MAX_IMMEDIATE) {
1038 set_bad(ureg);
1039 return ureg_src_register(TGSI_FILE_IMMEDIATE, 0);
1040 }
1041
1042 index = ureg->nr_immediates;
1043 ureg->nr_immediates += (nr + 3) / 4;
1044
1045 for (i = index; i < ureg->nr_immediates; i++) {
1046 ureg->immediate[i].type = TGSI_IMM_UINT32;
1047 ureg->immediate[i].nr = nr > 4 ? 4 : nr;
1048 memcpy(ureg->immediate[i].value.u,
1049 &v[(i - index) * 4],
1050 ureg->immediate[i].nr * sizeof(uint));
1051 nr -= 4;
1052 }
1053
1054 return ureg_src_register(TGSI_FILE_IMMEDIATE, index);
1055 }
1056
1057
1058 struct ureg_src
1059 ureg_DECL_immediate_int( struct ureg_program *ureg,
1060 const int *v,
1061 unsigned nr )
1062 {
1063 return decl_immediate(ureg, (const unsigned *)v, nr, TGSI_IMM_INT32);
1064 }
1065
1066 struct ureg_src
1067 ureg_DECL_immediate_uint64( struct ureg_program *ureg,
1068 const uint64_t *v,
1069 unsigned nr )
1070 {
1071 union {
1072 unsigned u[4];
1073 uint64_t u64[2];
1074 } fu;
1075 unsigned int i;
1076
1077 assert((nr / 2) < 3);
1078 for (i = 0; i < nr / 2; i++) {
1079 fu.u64[i] = v[i];
1080 }
1081
1082 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_UINT64);
1083 }
1084
1085 struct ureg_src
1086 ureg_DECL_immediate_int64( struct ureg_program *ureg,
1087 const int64_t *v,
1088 unsigned nr )
1089 {
1090 union {
1091 unsigned u[4];
1092 int64_t i64[2];
1093 } fu;
1094 unsigned int i;
1095
1096 assert((nr / 2) < 3);
1097 for (i = 0; i < nr / 2; i++) {
1098 fu.i64[i] = v[i];
1099 }
1100
1101 return decl_immediate(ureg, fu.u, nr, TGSI_IMM_INT64);
1102 }
1103
1104 void
1105 ureg_emit_src( struct ureg_program *ureg,
1106 struct ureg_src src )
1107 {
1108 unsigned size = 1 + (src.Indirect ? 1 : 0) +
1109 (src.Dimension ? (src.DimIndirect ? 2 : 1) : 0);
1110
1111 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1112 unsigned n = 0;
1113
1114 assert(src.File != TGSI_FILE_NULL);
1115 assert(src.File < TGSI_FILE_COUNT);
1116
1117 out[n].value = 0;
1118 out[n].src.File = src.File;
1119 out[n].src.SwizzleX = src.SwizzleX;
1120 out[n].src.SwizzleY = src.SwizzleY;
1121 out[n].src.SwizzleZ = src.SwizzleZ;
1122 out[n].src.SwizzleW = src.SwizzleW;
1123 out[n].src.Index = src.Index;
1124 out[n].src.Negate = src.Negate;
1125 out[0].src.Absolute = src.Absolute;
1126 n++;
1127
1128 if (src.Indirect) {
1129 out[0].src.Indirect = 1;
1130 out[n].value = 0;
1131 out[n].ind.File = src.IndirectFile;
1132 out[n].ind.Swizzle = src.IndirectSwizzle;
1133 out[n].ind.Index = src.IndirectIndex;
1134 if (!ureg->supports_any_inout_decl_range &&
1135 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1136 out[n].ind.ArrayID = 0;
1137 else
1138 out[n].ind.ArrayID = src.ArrayID;
1139 n++;
1140 }
1141
1142 if (src.Dimension) {
1143 out[0].src.Dimension = 1;
1144 out[n].dim.Dimension = 0;
1145 out[n].dim.Padding = 0;
1146 if (src.DimIndirect) {
1147 out[n].dim.Indirect = 1;
1148 out[n].dim.Index = src.DimensionIndex;
1149 n++;
1150 out[n].value = 0;
1151 out[n].ind.File = src.DimIndFile;
1152 out[n].ind.Swizzle = src.DimIndSwizzle;
1153 out[n].ind.Index = src.DimIndIndex;
1154 if (!ureg->supports_any_inout_decl_range &&
1155 (src.File == TGSI_FILE_INPUT || src.File == TGSI_FILE_OUTPUT))
1156 out[n].ind.ArrayID = 0;
1157 else
1158 out[n].ind.ArrayID = src.ArrayID;
1159 } else {
1160 out[n].dim.Indirect = 0;
1161 out[n].dim.Index = src.DimensionIndex;
1162 }
1163 n++;
1164 }
1165
1166 assert(n == size);
1167 }
1168
1169
1170 void
1171 ureg_emit_dst( struct ureg_program *ureg,
1172 struct ureg_dst dst )
1173 {
1174 unsigned size = 1 + (dst.Indirect ? 1 : 0) +
1175 (dst.Dimension ? (dst.DimIndirect ? 2 : 1) : 0);
1176
1177 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_INSN, size );
1178 unsigned n = 0;
1179
1180 assert(dst.File != TGSI_FILE_NULL);
1181 assert(dst.File != TGSI_FILE_SAMPLER);
1182 assert(dst.File != TGSI_FILE_SAMPLER_VIEW);
1183 assert(dst.File != TGSI_FILE_IMMEDIATE);
1184 assert(dst.File < TGSI_FILE_COUNT);
1185
1186 out[n].value = 0;
1187 out[n].dst.File = dst.File;
1188 out[n].dst.WriteMask = dst.WriteMask;
1189 out[n].dst.Indirect = dst.Indirect;
1190 out[n].dst.Index = dst.Index;
1191 n++;
1192
1193 if (dst.Indirect) {
1194 out[n].value = 0;
1195 out[n].ind.File = dst.IndirectFile;
1196 out[n].ind.Swizzle = dst.IndirectSwizzle;
1197 out[n].ind.Index = dst.IndirectIndex;
1198 if (!ureg->supports_any_inout_decl_range &&
1199 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1200 out[n].ind.ArrayID = 0;
1201 else
1202 out[n].ind.ArrayID = dst.ArrayID;
1203 n++;
1204 }
1205
1206 if (dst.Dimension) {
1207 out[0].dst.Dimension = 1;
1208 out[n].dim.Dimension = 0;
1209 out[n].dim.Padding = 0;
1210 if (dst.DimIndirect) {
1211 out[n].dim.Indirect = 1;
1212 out[n].dim.Index = dst.DimensionIndex;
1213 n++;
1214 out[n].value = 0;
1215 out[n].ind.File = dst.DimIndFile;
1216 out[n].ind.Swizzle = dst.DimIndSwizzle;
1217 out[n].ind.Index = dst.DimIndIndex;
1218 if (!ureg->supports_any_inout_decl_range &&
1219 (dst.File == TGSI_FILE_INPUT || dst.File == TGSI_FILE_OUTPUT))
1220 out[n].ind.ArrayID = 0;
1221 else
1222 out[n].ind.ArrayID = dst.ArrayID;
1223 } else {
1224 out[n].dim.Indirect = 0;
1225 out[n].dim.Index = dst.DimensionIndex;
1226 }
1227 n++;
1228 }
1229
1230 assert(n == size);
1231 }
1232
1233
1234 static void validate( unsigned opcode,
1235 unsigned nr_dst,
1236 unsigned nr_src )
1237 {
1238 #ifdef DEBUG
1239 const struct tgsi_opcode_info *info = tgsi_get_opcode_info( opcode );
1240 assert(info);
1241 if (info) {
1242 assert(nr_dst == info->num_dst);
1243 assert(nr_src == info->num_src);
1244 }
1245 #endif
1246 }
1247
1248 struct ureg_emit_insn_result
1249 ureg_emit_insn(struct ureg_program *ureg,
1250 unsigned opcode,
1251 boolean saturate,
1252 unsigned precise,
1253 unsigned num_dst,
1254 unsigned num_src)
1255 {
1256 union tgsi_any_token *out;
1257 uint count = 1;
1258 struct ureg_emit_insn_result result;
1259
1260 validate( opcode, num_dst, num_src );
1261
1262 out = get_tokens( ureg, DOMAIN_INSN, count );
1263 out[0].insn = tgsi_default_instruction();
1264 out[0].insn.Opcode = opcode;
1265 out[0].insn.Saturate = saturate;
1266 out[0].insn.Precise = precise;
1267 out[0].insn.NumDstRegs = num_dst;
1268 out[0].insn.NumSrcRegs = num_src;
1269
1270 result.insn_token = ureg->domain[DOMAIN_INSN].count - count;
1271 result.extended_token = result.insn_token;
1272
1273 ureg->nr_instructions++;
1274
1275 return result;
1276 }
1277
1278
1279 /**
1280 * Emit a label token.
1281  * \param label_token returns the token number at which the label operand
1282  * was emitted; pass this value to ureg_fixup_label() later to patch in the
1283  * target instruction number.
1284 */
1285 void
1286 ureg_emit_label(struct ureg_program *ureg,
1287 unsigned extended_token,
1288 unsigned *label_token )
1289 {
1290 union tgsi_any_token *out, *insn;
1291
1292 if (!label_token)
1293 return;
1294
1295 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1296 out[0].value = 0;
1297
1298 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1299 insn->insn.Label = 1;
1300
1301 *label_token = ureg->domain[DOMAIN_INSN].count - 1;
1302 }
1303
1304 /* Return the number of the next instruction to be emitted, suitable for
1305  * use as a label target.
1306 */
1307 unsigned
1308 ureg_get_instruction_number( struct ureg_program *ureg )
1309 {
1310 return ureg->nr_instructions;
1311 }
1312
1313 /* Patch a given label (expressed as a token number) to point to a
1314 * given instruction (expressed as an instruction number).
1315 */
1316 void
1317 ureg_fixup_label(struct ureg_program *ureg,
1318 unsigned label_token,
1319 unsigned instruction_number )
1320 {
1321 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, label_token );
1322
1323 out->insn_label.Label = instruction_number;
1324 }
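/* One common emit/fixup pattern for branches -- a sketch assuming a valid
 * "struct ureg_program *ureg", a condition source "cond", and the
 * ureg_IF()/ureg_ELSE()/ureg_ENDIF() helpers declared in tgsi_ureg.h:
 *
 *    unsigned label;
 *    ureg_IF(ureg, cond, &label);
 *       ...                         // "then" instructions
 *    ureg_fixup_label(ureg, label, ureg_get_instruction_number(ureg));
 *    ureg_ELSE(ureg, &label);
 *       ...                         // "else" instructions
 *    ureg_fixup_label(ureg, label, ureg_get_instruction_number(ureg));
 *    ureg_ENDIF(ureg);
 */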
1325
1326
1327 void
1328 ureg_emit_texture(struct ureg_program *ureg,
1329 unsigned extended_token,
1330 enum tgsi_texture_type target,
1331 enum tgsi_return_type return_type, unsigned num_offsets)
1332 {
1333 union tgsi_any_token *out, *insn;
1334
1335 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1336 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1337
1338 insn->insn.Texture = 1;
1339
1340 out[0].value = 0;
1341 out[0].insn_texture.Texture = target;
1342 out[0].insn_texture.NumOffsets = num_offsets;
1343 out[0].insn_texture.ReturnType = return_type;
1344 }
1345
1346 void
1347 ureg_emit_texture_offset(struct ureg_program *ureg,
1348 const struct tgsi_texture_offset *offset)
1349 {
1350 union tgsi_any_token *out;
1351
1352 out = get_tokens( ureg, DOMAIN_INSN, 1);
1353
1354 out[0].value = 0;
1355 out[0].insn_texture_offset = *offset;
1356
1357 }
1358
1359 void
1360 ureg_emit_memory(struct ureg_program *ureg,
1361 unsigned extended_token,
1362 unsigned qualifier,
1363 unsigned texture,
1364 unsigned format)
1365 {
1366 union tgsi_any_token *out, *insn;
1367
1368 out = get_tokens( ureg, DOMAIN_INSN, 1 );
1369 insn = retrieve_token( ureg, DOMAIN_INSN, extended_token );
1370
1371 insn->insn.Memory = 1;
1372
1373 out[0].value = 0;
1374 out[0].insn_memory.Qualifier = qualifier;
1375 out[0].insn_memory.Texture = texture;
1376 out[0].insn_memory.Format = format;
1377 }
1378
1379 void
1380 ureg_fixup_insn_size(struct ureg_program *ureg,
1381 unsigned insn )
1382 {
1383 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_INSN, insn );
1384
1385 assert(out->insn.Type == TGSI_TOKEN_TYPE_INSTRUCTION);
1386 out->insn.NrTokens = ureg->domain[DOMAIN_INSN].count - insn - 1;
1387 }
1388
1389
1390 void
1391 ureg_insn(struct ureg_program *ureg,
1392 unsigned opcode,
1393 const struct ureg_dst *dst,
1394 unsigned nr_dst,
1395 const struct ureg_src *src,
1396 unsigned nr_src,
1397 unsigned precise )
1398 {
1399 struct ureg_emit_insn_result insn;
1400 unsigned i;
1401 boolean saturate;
1402
1403 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1404 return;
1405 }
1406
1407 saturate = nr_dst ? dst[0].Saturate : FALSE;
1408
1409 insn = ureg_emit_insn(ureg,
1410 opcode,
1411 saturate,
1412 precise,
1413 nr_dst,
1414 nr_src);
1415
1416 for (i = 0; i < nr_dst; i++)
1417 ureg_emit_dst( ureg, dst[i] );
1418
1419 for (i = 0; i < nr_src; i++)
1420 ureg_emit_src( ureg, src[i] );
1421
1422 ureg_fixup_insn_size( ureg, insn.insn_token );
1423 }
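/* Illustrative call, assuming a valid "struct ureg_program *ureg" and
 * previously built operands dst/a/b/c.  ureg_insn() is the generic path
 * behind the per-opcode helpers in tgsi_ureg.h; an unsaturated MAD:
 *
 *    struct ureg_src srcs[3] = { a, b, c };
 *    ureg_insn(ureg, TGSI_OPCODE_MAD, &dst, 1, srcs, 3, 0);
 *
 * is equivalent to ureg_MAD(ureg, dst, a, b, c).
 */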
1424
1425 void
1426 ureg_tex_insn(struct ureg_program *ureg,
1427 unsigned opcode,
1428 const struct ureg_dst *dst,
1429 unsigned nr_dst,
1430 enum tgsi_texture_type target,
1431 enum tgsi_return_type return_type,
1432 const struct tgsi_texture_offset *texoffsets,
1433 unsigned nr_offset,
1434 const struct ureg_src *src,
1435 unsigned nr_src )
1436 {
1437 struct ureg_emit_insn_result insn;
1438 unsigned i;
1439 boolean saturate;
1440
1441 if (nr_dst && ureg_dst_is_empty(dst[0])) {
1442 return;
1443 }
1444
1445 saturate = nr_dst ? dst[0].Saturate : FALSE;
1446
1447 insn = ureg_emit_insn(ureg,
1448 opcode,
1449 saturate,
1450 0,
1451 nr_dst,
1452 nr_src);
1453
1454 ureg_emit_texture( ureg, insn.extended_token, target, return_type,
1455 nr_offset );
1456
1457 for (i = 0; i < nr_offset; i++)
1458 ureg_emit_texture_offset( ureg, &texoffsets[i]);
1459
1460 for (i = 0; i < nr_dst; i++)
1461 ureg_emit_dst( ureg, dst[i] );
1462
1463 for (i = 0; i < nr_src; i++)
1464 ureg_emit_src( ureg, src[i] );
1465
1466 ureg_fixup_insn_size( ureg, insn.insn_token );
1467 }
1468
1469
1470 void
1471 ureg_memory_insn(struct ureg_program *ureg,
1472 unsigned opcode,
1473 const struct ureg_dst *dst,
1474 unsigned nr_dst,
1475 const struct ureg_src *src,
1476 unsigned nr_src,
1477 unsigned qualifier,
1478 unsigned texture,
1479 unsigned format)
1480 {
1481 struct ureg_emit_insn_result insn;
1482 unsigned i;
1483
1484 insn = ureg_emit_insn(ureg,
1485 opcode,
1486 FALSE,
1487 0,
1488 nr_dst,
1489 nr_src);
1490
1491 ureg_emit_memory(ureg, insn.extended_token, qualifier, texture, format);
1492
1493 for (i = 0; i < nr_dst; i++)
1494 ureg_emit_dst(ureg, dst[i]);
1495
1496 for (i = 0; i < nr_src; i++)
1497 ureg_emit_src(ureg, src[i]);
1498
1499 ureg_fixup_insn_size(ureg, insn.insn_token);
1500 }
1501
1502
1503 static void
1504 emit_decl_semantic(struct ureg_program *ureg,
1505 unsigned file,
1506 unsigned first,
1507 unsigned last,
1508 unsigned semantic_name,
1509 unsigned semantic_index,
1510 unsigned streams,
1511 unsigned usage_mask,
1512 unsigned array_id)
1513 {
1514 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1515
1516 out[0].value = 0;
1517 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1518 out[0].decl.NrTokens = 3;
1519 out[0].decl.File = file;
1520 out[0].decl.UsageMask = usage_mask;
1521 out[0].decl.Semantic = 1;
1522 out[0].decl.Array = array_id != 0;
1523
1524 out[1].value = 0;
1525 out[1].decl_range.First = first;
1526 out[1].decl_range.Last = last;
1527
1528 out[2].value = 0;
1529 out[2].decl_semantic.Name = semantic_name;
1530 out[2].decl_semantic.Index = semantic_index;
1531 out[2].decl_semantic.StreamX = streams & 3;
1532 out[2].decl_semantic.StreamY = (streams >> 2) & 3;
1533 out[2].decl_semantic.StreamZ = (streams >> 4) & 3;
1534 out[2].decl_semantic.StreamW = (streams >> 6) & 3;
1535
1536 if (array_id) {
1537 out[3].value = 0;
1538 out[3].array.ArrayID = array_id;
1539 }
1540 }
1541
1542 static void
1543 emit_decl_atomic_2d(struct ureg_program *ureg,
1544 unsigned first,
1545 unsigned last,
1546 unsigned index2D,
1547 unsigned array_id)
1548 {
1549 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, array_id ? 4 : 3);
1550
1551 out[0].value = 0;
1552 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1553 out[0].decl.NrTokens = 3;
1554 out[0].decl.File = TGSI_FILE_HW_ATOMIC;
1555 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1556 out[0].decl.Dimension = 1;
1557 out[0].decl.Array = array_id != 0;
1558
1559 out[1].value = 0;
1560 out[1].decl_range.First = first;
1561 out[1].decl_range.Last = last;
1562
1563 out[2].value = 0;
1564 out[2].decl_dim.Index2D = index2D;
1565
1566 if (array_id) {
1567 out[3].value = 0;
1568 out[3].array.ArrayID = array_id;
1569 }
1570 }
1571
1572 static void
1573 emit_decl_fs(struct ureg_program *ureg,
1574 unsigned file,
1575 unsigned first,
1576 unsigned last,
1577 unsigned semantic_name,
1578 unsigned semantic_index,
1579 enum tgsi_interpolate_mode interpolate,
1580 unsigned cylindrical_wrap,
1581 enum tgsi_interpolate_loc interpolate_location,
1582 unsigned array_id,
1583 unsigned usage_mask)
1584 {
1585 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL,
1586 array_id ? 5 : 4);
1587
1588 out[0].value = 0;
1589 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1590 out[0].decl.NrTokens = 4;
1591 out[0].decl.File = file;
1592 out[0].decl.UsageMask = usage_mask;
1593 out[0].decl.Interpolate = 1;
1594 out[0].decl.Semantic = 1;
1595 out[0].decl.Array = array_id != 0;
1596
1597 out[1].value = 0;
1598 out[1].decl_range.First = first;
1599 out[1].decl_range.Last = last;
1600
1601 out[2].value = 0;
1602 out[2].decl_interp.Interpolate = interpolate;
1603 out[2].decl_interp.CylindricalWrap = cylindrical_wrap;
1604 out[2].decl_interp.Location = interpolate_location;
1605
1606 out[3].value = 0;
1607 out[3].decl_semantic.Name = semantic_name;
1608 out[3].decl_semantic.Index = semantic_index;
1609
1610 if (array_id) {
1611 out[4].value = 0;
1612 out[4].array.ArrayID = array_id;
1613 }
1614 }
1615
1616 static void
1617 emit_decl_temps( struct ureg_program *ureg,
1618 unsigned first, unsigned last,
1619 boolean local,
1620 unsigned arrayid )
1621 {
1622 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL,
1623 arrayid ? 3 : 2 );
1624
1625 out[0].value = 0;
1626 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1627 out[0].decl.NrTokens = 2;
1628 out[0].decl.File = TGSI_FILE_TEMPORARY;
1629 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1630 out[0].decl.Local = local;
1631
1632 out[1].value = 0;
1633 out[1].decl_range.First = first;
1634 out[1].decl_range.Last = last;
1635
1636 if (arrayid) {
1637 out[0].decl.Array = 1;
1638 out[2].value = 0;
1639 out[2].array.ArrayID = arrayid;
1640 }
1641 }
1642
1643 static void emit_decl_range( struct ureg_program *ureg,
1644 unsigned file,
1645 unsigned first,
1646 unsigned count )
1647 {
1648 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
1649
1650 out[0].value = 0;
1651 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1652 out[0].decl.NrTokens = 2;
1653 out[0].decl.File = file;
1654 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1655 out[0].decl.Semantic = 0;
1656
1657 out[1].value = 0;
1658 out[1].decl_range.First = first;
1659 out[1].decl_range.Last = first + count - 1;
1660 }
1661
1662 static void
1663 emit_decl_range2D(struct ureg_program *ureg,
1664 unsigned file,
1665 unsigned first,
1666 unsigned last,
1667 unsigned index2D)
1668 {
1669 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1670
1671 out[0].value = 0;
1672 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1673 out[0].decl.NrTokens = 3;
1674 out[0].decl.File = file;
1675 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1676 out[0].decl.Dimension = 1;
1677
1678 out[1].value = 0;
1679 out[1].decl_range.First = first;
1680 out[1].decl_range.Last = last;
1681
1682 out[2].value = 0;
1683 out[2].decl_dim.Index2D = index2D;
1684 }
1685
1686 static void
1687 emit_decl_sampler_view(struct ureg_program *ureg,
1688 unsigned index,
1689 enum tgsi_texture_type target,
1690 enum tgsi_return_type return_type_x,
1691 enum tgsi_return_type return_type_y,
1692 enum tgsi_return_type return_type_z,
1693 enum tgsi_return_type return_type_w )
1694 {
1695 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1696
1697 out[0].value = 0;
1698 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1699 out[0].decl.NrTokens = 3;
1700 out[0].decl.File = TGSI_FILE_SAMPLER_VIEW;
1701 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1702
1703 out[1].value = 0;
1704 out[1].decl_range.First = index;
1705 out[1].decl_range.Last = index;
1706
1707 out[2].value = 0;
1708 out[2].decl_sampler_view.Resource = target;
1709 out[2].decl_sampler_view.ReturnTypeX = return_type_x;
1710 out[2].decl_sampler_view.ReturnTypeY = return_type_y;
1711 out[2].decl_sampler_view.ReturnTypeZ = return_type_z;
1712 out[2].decl_sampler_view.ReturnTypeW = return_type_w;
1713 }
1714
1715 static void
1716 emit_decl_image(struct ureg_program *ureg,
1717 unsigned index,
1718 enum tgsi_texture_type target,
1719 unsigned format,
1720 boolean wr,
1721 boolean raw)
1722 {
1723 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 3);
1724
1725 out[0].value = 0;
1726 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1727 out[0].decl.NrTokens = 3;
1728 out[0].decl.File = TGSI_FILE_IMAGE;
1729 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1730
1731 out[1].value = 0;
1732 out[1].decl_range.First = index;
1733 out[1].decl_range.Last = index;
1734
1735 out[2].value = 0;
1736 out[2].decl_image.Resource = target;
1737 out[2].decl_image.Writable = wr;
1738 out[2].decl_image.Raw = raw;
1739 out[2].decl_image.Format = format;
1740 }
1741
1742 static void
1743 emit_decl_buffer(struct ureg_program *ureg,
1744 unsigned index,
1745 bool atomic)
1746 {
1747 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1748
1749 out[0].value = 0;
1750 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1751 out[0].decl.NrTokens = 2;
1752 out[0].decl.File = TGSI_FILE_BUFFER;
1753 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1754 out[0].decl.Atomic = atomic;
1755
1756 out[1].value = 0;
1757 out[1].decl_range.First = index;
1758 out[1].decl_range.Last = index;
1759 }
1760
1761 static void
1762 emit_decl_memory(struct ureg_program *ureg, unsigned memory_type)
1763 {
1764 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1765
1766 out[0].value = 0;
1767 out[0].decl.Type = TGSI_TOKEN_TYPE_DECLARATION;
1768 out[0].decl.NrTokens = 2;
1769 out[0].decl.File = TGSI_FILE_MEMORY;
1770 out[0].decl.UsageMask = TGSI_WRITEMASK_XYZW;
1771 out[0].decl.MemType = memory_type;
1772
1773 out[1].value = 0;
1774 out[1].decl_range.First = memory_type;
1775 out[1].decl_range.Last = memory_type;
1776 }
1777
1778 static void
1779 emit_immediate( struct ureg_program *ureg,
1780 const unsigned *v,
1781 unsigned type )
1782 {
1783 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 5 );
1784
1785 out[0].value = 0;
1786 out[0].imm.Type = TGSI_TOKEN_TYPE_IMMEDIATE;
1787 out[0].imm.NrTokens = 5;
1788 out[0].imm.DataType = type;
1789 out[0].imm.Padding = 0;
1790
1791 out[1].imm_data.Uint = v[0];
1792 out[2].imm_data.Uint = v[1];
1793 out[3].imm_data.Uint = v[2];
1794 out[4].imm_data.Uint = v[3];
1795 }
1796
1797 static void
1798 emit_property(struct ureg_program *ureg,
1799 unsigned name,
1800 unsigned data)
1801 {
1802 union tgsi_any_token *out = get_tokens(ureg, DOMAIN_DECL, 2);
1803
1804 out[0].value = 0;
1805 out[0].prop.Type = TGSI_TOKEN_TYPE_PROPERTY;
1806 out[0].prop.NrTokens = 2;
1807 out[0].prop.PropertyName = name;
1808
1809 out[1].prop_data.Data = data;
1810 }
1811
1812
1813 static void emit_decls( struct ureg_program *ureg )
1814 {
1815 unsigned i,j;
1816
1817 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
1818 if (ureg->properties[i] != ~0)
1819 emit_property(ureg, i, ureg->properties[i]);
1820
1821 if (ureg->processor == PIPE_SHADER_VERTEX) {
1822 for (i = 0; i < PIPE_MAX_ATTRIBS; i++) {
1823 if (ureg->vs_inputs[i/32] & (1u << (i%32))) {
1824 emit_decl_range( ureg, TGSI_FILE_INPUT, i, 1 );
1825 }
1826 }
1827 } else if (ureg->processor == PIPE_SHADER_FRAGMENT) {
1828 if (ureg->supports_any_inout_decl_range) {
1829 for (i = 0; i < ureg->nr_inputs; i++) {
1830 emit_decl_fs(ureg,
1831 TGSI_FILE_INPUT,
1832 ureg->input[i].first,
1833 ureg->input[i].last,
1834 ureg->input[i].semantic_name,
1835 ureg->input[i].semantic_index,
1836 ureg->input[i].interp,
1837 ureg->input[i].cylindrical_wrap,
1838 ureg->input[i].interp_location,
1839 ureg->input[i].array_id,
1840 ureg->input[i].usage_mask);
1841 }
1842 }
1843 else {
1844 for (i = 0; i < ureg->nr_inputs; i++) {
1845 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1846 emit_decl_fs(ureg,
1847 TGSI_FILE_INPUT,
1848 j, j,
1849 ureg->input[i].semantic_name,
1850 ureg->input[i].semantic_index +
1851 (j - ureg->input[i].first),
1852 ureg->input[i].interp,
1853 ureg->input[i].cylindrical_wrap,
1854 ureg->input[i].interp_location, 0,
1855 ureg->input[i].usage_mask);
1856 }
1857 }
1858 }
1859 } else {
1860 if (ureg->supports_any_inout_decl_range) {
1861 for (i = 0; i < ureg->nr_inputs; i++) {
1862 emit_decl_semantic(ureg,
1863 TGSI_FILE_INPUT,
1864 ureg->input[i].first,
1865 ureg->input[i].last,
1866 ureg->input[i].semantic_name,
1867 ureg->input[i].semantic_index,
1868 0,
1869 TGSI_WRITEMASK_XYZW,
1870 ureg->input[i].array_id);
1871 }
1872 }
1873 else {
1874 for (i = 0; i < ureg->nr_inputs; i++) {
1875 for (j = ureg->input[i].first; j <= ureg->input[i].last; j++) {
1876 emit_decl_semantic(ureg,
1877 TGSI_FILE_INPUT,
1878 j, j,
1879 ureg->input[i].semantic_name,
1880 ureg->input[i].semantic_index +
1881 (j - ureg->input[i].first),
1882 0,
1883 TGSI_WRITEMASK_XYZW, 0);
1884 }
1885 }
1886 }
1887 }
1888
1889 for (i = 0; i < ureg->nr_system_values; i++) {
1890 emit_decl_semantic(ureg,
1891 TGSI_FILE_SYSTEM_VALUE,
1892 i,
1893 i,
1894 ureg->system_value[i].semantic_name,
1895 ureg->system_value[i].semantic_index,
1896 0,
1897 TGSI_WRITEMASK_XYZW, 0);
1898 }
1899
1900 if (ureg->supports_any_inout_decl_range) {
1901 for (i = 0; i < ureg->nr_outputs; i++) {
1902 emit_decl_semantic(ureg,
1903 TGSI_FILE_OUTPUT,
1904 ureg->output[i].first,
1905 ureg->output[i].last,
1906 ureg->output[i].semantic_name,
1907 ureg->output[i].semantic_index,
1908 ureg->output[i].streams,
1909 ureg->output[i].usage_mask,
1910 ureg->output[i].array_id);
1911 }
1912 }
1913 else {
1914 for (i = 0; i < ureg->nr_outputs; i++) {
1915 for (j = ureg->output[i].first; j <= ureg->output[i].last; j++) {
1916 emit_decl_semantic(ureg,
1917 TGSI_FILE_OUTPUT,
1918 j, j,
1919 ureg->output[i].semantic_name,
1920 ureg->output[i].semantic_index +
1921 (j - ureg->output[i].first),
1922 ureg->output[i].streams,
1923 ureg->output[i].usage_mask, 0);
1924 }
1925 }
1926 }
1927
1928 for (i = 0; i < ureg->nr_samplers; i++) {
1929 emit_decl_range( ureg,
1930 TGSI_FILE_SAMPLER,
1931 ureg->sampler[i].Index, 1 );
1932 }
1933
1934 for (i = 0; i < ureg->nr_sampler_views; i++) {
1935 emit_decl_sampler_view(ureg,
1936 ureg->sampler_view[i].index,
1937 ureg->sampler_view[i].target,
1938 ureg->sampler_view[i].return_type_x,
1939 ureg->sampler_view[i].return_type_y,
1940 ureg->sampler_view[i].return_type_z,
1941 ureg->sampler_view[i].return_type_w);
1942 }
1943
1944 for (i = 0; i < ureg->nr_images; i++) {
1945 emit_decl_image(ureg,
1946 ureg->image[i].index,
1947 ureg->image[i].target,
1948 ureg->image[i].format,
1949 ureg->image[i].wr,
1950 ureg->image[i].raw);
1951 }
1952
1953 for (i = 0; i < ureg->nr_buffers; i++) {
1954 emit_decl_buffer(ureg, ureg->buffer[i].index, ureg->buffer[i].atomic);
1955 }
1956
1957 for (i = 0; i < TGSI_MEMORY_TYPE_COUNT; i++) {
1958 if (ureg->use_memory[i])
1959 emit_decl_memory(ureg, i);
1960 }
1961
1962 for (i = 0; i < PIPE_MAX_CONSTANT_BUFFERS; i++) {
1963 struct const_decl *decl = &ureg->const_decls[i];
1964
1965 if (decl->nr_constant_ranges) {
1966 uint j;
1967
1968 for (j = 0; j < decl->nr_constant_ranges; j++) {
1969 emit_decl_range2D(ureg,
1970 TGSI_FILE_CONSTANT,
1971 decl->constant_range[j].first,
1972 decl->constant_range[j].last,
1973 i);
1974 }
1975 }
1976 }
1977
1978 for (i = 0; i < PIPE_MAX_HW_ATOMIC_BUFFERS; i++) {
1979 struct hw_atomic_decl *decl = &ureg->hw_atomic_decls[i];
1980
1981 if (decl->nr_hw_atomic_ranges) {
1982 uint j;
1983
1984 for (j = 0; j < decl->nr_hw_atomic_ranges; j++) {
1985 emit_decl_atomic_2d(ureg,
1986 decl->hw_atomic_range[j].first,
1987 decl->hw_atomic_range[j].last,
1988 i,
1989 decl->hw_atomic_range[j].array_id);
1990 }
1991 }
1992 }
1993
1994 if (ureg->nr_temps) {
1995 unsigned array = 0;
1996 for (i = 0; i < ureg->nr_temps;) {
1997 boolean local = util_bitmask_get(ureg->local_temps, i);
1998 unsigned first = i;
1999 i = util_bitmask_get_next_index(ureg->decl_temps, i + 1);
2000 if (i == UTIL_BITMASK_INVALID_INDEX)
2001 i = ureg->nr_temps;
2002
2003 if (array < ureg->nr_array_temps && ureg->array_temps[array] == first)
2004 emit_decl_temps( ureg, first, i - 1, local, ++array );
2005 else
2006 emit_decl_temps( ureg, first, i - 1, local, 0 );
2007 }
2008 }
2009
2010 if (ureg->nr_addrs) {
2011 emit_decl_range( ureg,
2012 TGSI_FILE_ADDRESS,
2013 0, ureg->nr_addrs );
2014 }
2015
2016 for (i = 0; i < ureg->nr_immediates; i++) {
2017 emit_immediate( ureg,
2018 ureg->immediate[i].value.u,
2019 ureg->immediate[i].type );
2020 }
2021 }
2022
2023 /* Append the instruction tokens onto the declarations to build a
2024 * contiguous stream suitable to send to the driver.
2025 */
2026 static void copy_instructions( struct ureg_program *ureg )
2027 {
2028 unsigned nr_tokens = ureg->domain[DOMAIN_INSN].count;
2029 union tgsi_any_token *out = get_tokens( ureg,
2030 DOMAIN_DECL,
2031 nr_tokens );
2032
2033 memcpy(out,
2034 ureg->domain[DOMAIN_INSN].tokens,
2035 nr_tokens * sizeof out[0] );
2036 }
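/* After copy_instructions() the DOMAIN_DECL stream holds the complete shader
 * in the order the rest of gallium expects, roughly:
 *
 *    tgsi_header | tgsi_processor | PROPERTY* | DECL* | IMM* | INSN*
 *
 * fixup_header_size() below then patches header.BodySize to cover everything
 * after the two header tokens.
 */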
2037
2038
2039 static void
2040 fixup_header_size(struct ureg_program *ureg)
2041 {
2042 union tgsi_any_token *out = retrieve_token( ureg, DOMAIN_DECL, 0 );
2043
2044 out->header.BodySize = ureg->domain[DOMAIN_DECL].count - 2;
2045 }
2046
2047
2048 static void
2049 emit_header( struct ureg_program *ureg )
2050 {
2051 union tgsi_any_token *out = get_tokens( ureg, DOMAIN_DECL, 2 );
2052
2053 out[0].header.HeaderSize = 2;
2054 out[0].header.BodySize = 0;
2055
2056 out[1].processor.Processor = ureg->processor;
2057 out[1].processor.Padding = 0;
2058 }
2059
2060
2061 const struct tgsi_token *ureg_finalize( struct ureg_program *ureg )
2062 {
2063 const struct tgsi_token *tokens;
2064
2065 switch (ureg->processor) {
2066 case PIPE_SHADER_VERTEX:
2067 case PIPE_SHADER_TESS_EVAL:
2068 ureg_property(ureg, TGSI_PROPERTY_NEXT_SHADER,
2069 ureg->next_shader_processor == -1 ?
2070 PIPE_SHADER_FRAGMENT :
2071 ureg->next_shader_processor);
2072 break;
2073 }
2074
2075 emit_header( ureg );
2076 emit_decls( ureg );
2077 copy_instructions( ureg );
2078 fixup_header_size( ureg );
2079
2080 if (ureg->domain[0].tokens == error_tokens ||
2081 ureg->domain[1].tokens == error_tokens) {
2082 debug_printf("%s: error in generated shader\n", __FUNCTION__);
2083 assert(0);
2084 return NULL;
2085 }
2086
2087 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2088
2089 if (0) {
2090 debug_printf("%s: emitted shader %d tokens:\n", __FUNCTION__,
2091 ureg->domain[DOMAIN_DECL].count);
2092 tgsi_dump( tokens, 0 );
2093 }
2094
2095 #ifdef DEBUG
2096 if (tokens && !tgsi_sanity_check(tokens)) {
2097 debug_printf("tgsi_ureg.c, sanity check failed on generated tokens:\n");
2098 tgsi_dump(tokens, 0);
2099 assert(0);
2100 }
2101 #endif
2102
2103
2104 return tokens;
2105 }
2106
2107
2108 void *ureg_create_shader( struct ureg_program *ureg,
2109 struct pipe_context *pipe,
2110 const struct pipe_stream_output_info *so )
2111 {
2112 struct pipe_shader_state state;
2113
2114 pipe_shader_state_from_tgsi(&state, ureg_finalize(ureg));
2115    if (!state.tokens)
2116 return NULL;
2117
2118 if (so)
2119 state.stream_output = *so;
2120
2121 switch (ureg->processor) {
2122 case PIPE_SHADER_VERTEX:
2123 return pipe->create_vs_state(pipe, &state);
2124 case PIPE_SHADER_TESS_CTRL:
2125 return pipe->create_tcs_state(pipe, &state);
2126 case PIPE_SHADER_TESS_EVAL:
2127 return pipe->create_tes_state(pipe, &state);
2128 case PIPE_SHADER_GEOMETRY:
2129 return pipe->create_gs_state(pipe, &state);
2130 case PIPE_SHADER_FRAGMENT:
2131 return pipe->create_fs_state(pipe, &state);
2132 default:
2133 return NULL;
2134 }
2135 }
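/* End-to-end usage sketch (a hypothetical pass-through fragment shader),
 * assuming a valid "struct pipe_context *pipe";
 * ureg_create_shader_and_destroy() is the convenience wrapper declared in
 * tgsi_ureg.h:
 *
 *    struct ureg_program *ureg = ureg_create(PIPE_SHADER_FRAGMENT);
 *    struct ureg_src color =
 *       ureg_DECL_fs_input(ureg, TGSI_SEMANTIC_COLOR, 0,
 *                          TGSI_INTERPOLATE_PERSPECTIVE);
 *    struct ureg_dst out_color = ureg_DECL_output(ureg, TGSI_SEMANTIC_COLOR, 0);
 *    ureg_MOV(ureg, out_color, color);
 *    ureg_END(ureg);
 *    void *fs = ureg_create_shader_and_destroy(ureg, pipe);
 */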
2136
2137
2138 const struct tgsi_token *ureg_get_tokens( struct ureg_program *ureg,
2139 unsigned *nr_tokens )
2140 {
2141 const struct tgsi_token *tokens;
2142
2143 ureg_finalize(ureg);
2144
2145 tokens = &ureg->domain[DOMAIN_DECL].tokens[0].token;
2146
2147 if (nr_tokens)
2148 *nr_tokens = ureg->domain[DOMAIN_DECL].count;
2149
2150 ureg->domain[DOMAIN_DECL].tokens = 0;
2151 ureg->domain[DOMAIN_DECL].size = 0;
2152 ureg->domain[DOMAIN_DECL].order = 0;
2153 ureg->domain[DOMAIN_DECL].count = 0;
2154
2155 return tokens;
2156 }
2157
2158
2159 void ureg_free_tokens( const struct tgsi_token *tokens )
2160 {
2161 FREE((struct tgsi_token *)tokens);
2162 }
2163
2164
2165 struct ureg_program *
2166 ureg_create(unsigned processor)
2167 {
2168 return ureg_create_with_screen(processor, NULL);
2169 }
2170
2171
2172 struct ureg_program *
2173 ureg_create_with_screen(unsigned processor, struct pipe_screen *screen)
2174 {
2175 int i;
2176 struct ureg_program *ureg = CALLOC_STRUCT( ureg_program );
2177 if (!ureg)
2178 goto no_ureg;
2179
2180 ureg->processor = processor;
2181 ureg->supports_any_inout_decl_range =
2182 screen &&
2183 screen->get_shader_param(screen, processor,
2184 PIPE_SHADER_CAP_TGSI_ANY_INOUT_DECL_RANGE) != 0;
2185 ureg->next_shader_processor = -1;
2186
2187 for (i = 0; i < ARRAY_SIZE(ureg->properties); i++)
2188 ureg->properties[i] = ~0;
2189
2190 ureg->free_temps = util_bitmask_create();
2191 if (ureg->free_temps == NULL)
2192 goto no_free_temps;
2193
2194 ureg->local_temps = util_bitmask_create();
2195 if (ureg->local_temps == NULL)
2196 goto no_local_temps;
2197
2198 ureg->decl_temps = util_bitmask_create();
2199 if (ureg->decl_temps == NULL)
2200 goto no_decl_temps;
2201
2202 return ureg;
2203
2204 no_decl_temps:
2205 util_bitmask_destroy(ureg->local_temps);
2206 no_local_temps:
2207 util_bitmask_destroy(ureg->free_temps);
2208 no_free_temps:
2209 FREE(ureg);
2210 no_ureg:
2211 return NULL;
2212 }
2213
2214
2215 void
2216 ureg_set_next_shader_processor(struct ureg_program *ureg, unsigned processor)
2217 {
2218 ureg->next_shader_processor = processor;
2219 }
2220
2221
2222 unsigned
2223 ureg_get_nr_outputs( const struct ureg_program *ureg )
2224 {
2225 if (!ureg)
2226 return 0;
2227 return ureg->nr_outputs;
2228 }
2229
2230
2231 void ureg_destroy( struct ureg_program *ureg )
2232 {
2233 unsigned i;
2234
2235 for (i = 0; i < ARRAY_SIZE(ureg->domain); i++) {
2236 if (ureg->domain[i].tokens &&
2237 ureg->domain[i].tokens != error_tokens)
2238 FREE(ureg->domain[i].tokens);
2239 }
2240
2241 util_bitmask_destroy(ureg->free_temps);
2242 util_bitmask_destroy(ureg->local_temps);
2243 util_bitmask_destroy(ureg->decl_temps);
2244
2245 FREE(ureg);
2246 }