spirv: Make vtn_function a vtn_cf_node
[mesa.git] / src / compiler / spirv / vtn_private.h
1 /*
2 * Copyright © 2015 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 * Jason Ekstrand (jason@jlekstrand.net)
25 *
26 */
27
28 #ifndef _VTN_PRIVATE_H_
29 #define _VTN_PRIVATE_H_
30
31 #include <setjmp.h>
32
33 #include "nir/nir.h"
34 #include "nir/nir_builder.h"
35 #include "util/u_dynarray.h"
36 #include "nir_spirv.h"
37 #include "spirv.h"
38
39 struct vtn_builder;
40 struct vtn_decoration;
41
42 void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
43 size_t spirv_offset, const char *message);
44
45 void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
46 size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);
47
48 #define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)
49
50 void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
51 const char *fmt, ...) PRINTFLIKE(4, 5);
52 #define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)
53
54 void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
55 const char *fmt, ...) PRINTFLIKE(4, 5);
56 #define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)
57
58 /** Fail SPIR-V parsing
59 *
60 * This function logs an error and then bails out of the shader compile using
61  * longjmp. This being safe relies on three things:
62 *
63 * 1) We must guarantee that setjmp is called after allocating the builder
64  *    and setting up b->debug (so that logging works) but before any
65 * errors have a chance to occur.
66 *
67 * 2) While doing the SPIR-V -> NIR conversion, we need to be careful to
68 * ensure that all heap allocations happen through ralloc and are parented
69 * to the builder. This way they will get properly cleaned up on error.
70 *
71 * 3) We must ensure that _vtn_fail is never called while a mutex lock or a
72 * reference to any other resource is held with the exception of ralloc
73 * objects which are parented to the builder.
74 *
75  * So long as these three things continue to hold, we can easily longjmp back to
76 * spirv_to_nir(), clean up the builder, and return NULL.
77 */
78 NORETURN void
79 _vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
80 const char *fmt, ...) PRINTFLIKE(4, 5);
81
82 #define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)
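/* A minimal sketch of the setjmp side of the contract described above (not
 * the actual spirv_to_nir() code): the jump buffer is armed right after the
 * builder and its debug state are set up, and every later allocation is
 * ralloc'ed off the builder, so a longjmp from _vtn_fail leaks nothing.
 *
 *    struct vtn_builder *b = vtn_create_builder(words, word_count, ...);
 *    if (b == NULL)
 *       return NULL;
 *
 *    if (setjmp(b->fail_jump)) {
 *       // _vtn_fail() longjmp'ed back here; everything allocated so far is
 *       // parented to b, so a single ralloc_free() cleans it all up.
 *       ralloc_free(b);
 *       return NULL;
 *    }
 *
 *    ... convert the SPIR-V, allocating only through ralloc(b, ...) ...
 */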
83
84 /** Fail if the given expression evaluates to true */
85 #define vtn_fail_if(expr, ...) \
86 do { \
87 if (unlikely(expr)) \
88 vtn_fail(__VA_ARGS__); \
89 } while (0)
90
91 #define _vtn_fail_with(t, msg, v) \
92 vtn_fail("%s: %s (%u)\n", msg, spirv_ ## t ## _to_string(v), v)
93
94 #define vtn_fail_with_decoration(msg, v) _vtn_fail_with(decoration, msg, v)
95 #define vtn_fail_with_opcode(msg, v) _vtn_fail_with(op, msg, v)
96
97 /** Assert that a condition is true and, if it isn't, vtn_fail
98 *
99 * This macro is transitional only and should not be used in new code. Use
100 * vtn_fail_if and provide a real message instead.
101 */
102 #define vtn_assert(expr) \
103 do { \
104 if (!likely(expr)) \
105 vtn_fail("%s", #expr); \
106 } while (0)
107
108 enum vtn_value_type {
109 vtn_value_type_invalid = 0,
110 vtn_value_type_undef,
111 vtn_value_type_string,
112 vtn_value_type_decoration_group,
113 vtn_value_type_type,
114 vtn_value_type_constant,
115 vtn_value_type_pointer,
116 vtn_value_type_function,
117 vtn_value_type_block,
118 vtn_value_type_ssa,
119 vtn_value_type_extension,
120 vtn_value_type_image_pointer,
121 vtn_value_type_sampled_image,
122 };
123
124 enum vtn_branch_type {
125 vtn_branch_type_none,
126 vtn_branch_type_switch_break,
127 vtn_branch_type_switch_fallthrough,
128 vtn_branch_type_loop_break,
129 vtn_branch_type_loop_continue,
130 vtn_branch_type_discard,
131 vtn_branch_type_return,
132 };
133
134 enum vtn_cf_node_type {
135 vtn_cf_node_type_block,
136 vtn_cf_node_type_if,
137 vtn_cf_node_type_loop,
138 vtn_cf_node_type_case,
139 vtn_cf_node_type_switch,
140 vtn_cf_node_type_function,
141 };
142
143 struct vtn_cf_node {
144 struct list_head link;
145 enum vtn_cf_node_type type;
146 };
147
148 struct vtn_loop {
149 struct vtn_cf_node node;
150
151 /* The main body of the loop */
152 struct list_head body;
153
154 /* The "continue" part of the loop. This gets executed after the body
155 * and is where you go when you hit a continue.
156 */
157 struct list_head cont_body;
158
159 SpvLoopControlMask control;
160 };
161
162 struct vtn_if {
163 struct vtn_cf_node node;
164
165 uint32_t condition;
166
167 enum vtn_branch_type then_type;
168 struct list_head then_body;
169
170 enum vtn_branch_type else_type;
171 struct list_head else_body;
172
173 SpvSelectionControlMask control;
174 };
175
176 struct vtn_case {
177 struct vtn_cf_node node;
178
179 struct list_head body;
180
181 /* The block that starts this case */
182 struct vtn_block *start_block;
183
184 /* The fallthrough case, if any */
185 struct vtn_case *fallthrough;
186
187 /* The uint32_t values that map to this case */
188 struct util_dynarray values;
189
190 /* True if this is the default case */
191 bool is_default;
192
193 /* Initialized to false; used when sorting the list of cases */
194 bool visited;
195 };
196
197 struct vtn_switch {
198 struct vtn_cf_node node;
199
200 uint32_t selector;
201
202 struct list_head cases;
203 };
204
205 struct vtn_block {
206 struct vtn_cf_node node;
207
208 /** A pointer to the label instruction */
209 const uint32_t *label;
210
211   /** A pointer to the merge instruction (or NULL if none exists) */
212 const uint32_t *merge;
213
214 /** A pointer to the branch instruction that ends this block */
215 const uint32_t *branch;
216
217 enum vtn_branch_type branch_type;
218
219 /** Points to the loop that this block starts (if it starts a loop) */
220 struct vtn_loop *loop;
221
222 /** Points to the switch case started by this block (if any) */
223 struct vtn_case *switch_case;
224
225 /** Every block ends in a nop intrinsic so that we can find it again */
226 nir_intrinsic_instr *end_nop;
227 };
228
229 struct vtn_function {
230 struct vtn_cf_node node;
231
232 struct vtn_type *type;
233
234 bool referenced;
235 bool emitted;
236
237 nir_function_impl *impl;
238 struct vtn_block *start_block;
239
240 struct list_head body;
241
242 const uint32_t *end;
243
244 SpvFunctionControlMask control;
245 };
246
247 #define VTN_DECL_CF_NODE_CAST(_type) \
248 static inline struct vtn_##_type * \
249 vtn_cf_node_as_##_type(struct vtn_cf_node *node) \
250 { \
251 assert(node->type == vtn_cf_node_type_##_type); \
252 return (struct vtn_##_type *)node; \
253 }
254
255 VTN_DECL_CF_NODE_CAST(block)
256 VTN_DECL_CF_NODE_CAST(loop)
257 VTN_DECL_CF_NODE_CAST(if)
258 VTN_DECL_CF_NODE_CAST(case)
259 VTN_DECL_CF_NODE_CAST(switch)
260 VTN_DECL_CF_NODE_CAST(function)
261
262 #define vtn_foreach_cf_node(node, cf_list) \
263 list_for_each_entry(struct vtn_cf_node, node, cf_list, link)
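/* Illustrative use of the helpers above (a sketch, not code from this file):
 * walk a control-flow list and downcast each node based on its type tag.
 *
 *    vtn_foreach_cf_node(node, &func->body) {
 *       switch (node->type) {
 *       case vtn_cf_node_type_block: {
 *          struct vtn_block *block = vtn_cf_node_as_block(node);
 *          // emit the basic block
 *          break;
 *       }
 *       case vtn_cf_node_type_loop: {
 *          struct vtn_loop *loop = vtn_cf_node_as_loop(node);
 *          // recurse into loop->body and loop->cont_body
 *          break;
 *       }
 *       default:
 *          break;
 *       }
 *    }
 */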
264
265 typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
266 const uint32_t *, unsigned);
267
268 void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
269 const uint32_t *end);
270 void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
271 vtn_instruction_handler instruction_handler);
272 void vtn_handle_function_call(struct vtn_builder *b, SpvOp opcode,
273 const uint32_t *w, unsigned count);
274
275 const uint32_t *
276 vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
277 const uint32_t *end, vtn_instruction_handler handler);
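/* A minimal handler with the vtn_instruction_handler signature (illustrative
 * only).  vtn_foreach_instruction() invokes it once per instruction, and the
 * boolean return value lets a handler stop the walk early.
 *
 *    static bool
 *    example_handler(struct vtn_builder *b, SpvOp opcode,
 *                    const uint32_t *w, unsigned count)
 *    {
 *       switch (opcode) {
 *       case SpvOpUndef:
 *          vtn_push_value(b, w[2], vtn_value_type_undef);
 *          return true;   // handled, keep walking
 *       default:
 *          return true;
 *       }
 *    }
 *
 *    vtn_foreach_instruction(b, words, end, example_handler);
 */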
278
279 struct vtn_ssa_value {
280 union {
281 nir_ssa_def *def;
282 struct vtn_ssa_value **elems;
283 };
284
285 /* For matrices, if this is non-NULL, then this value is actually the
286 * transpose of some other value. The value that `transposed` points to
287 * always dominates this value.
288 */
289 struct vtn_ssa_value *transposed;
290
291 const struct glsl_type *type;
292
293 /* Access qualifiers */
294 enum gl_access_qualifier access;
295 };
296
297 enum vtn_base_type {
298 vtn_base_type_void,
299 vtn_base_type_scalar,
300 vtn_base_type_vector,
301 vtn_base_type_matrix,
302 vtn_base_type_array,
303 vtn_base_type_struct,
304 vtn_base_type_pointer,
305 vtn_base_type_image,
306 vtn_base_type_sampler,
307 vtn_base_type_sampled_image,
308 vtn_base_type_function,
309 };
310
311 struct vtn_type {
312 enum vtn_base_type base_type;
313
314 const struct glsl_type *type;
315
316 /* The SPIR-V id of the given type. */
317 uint32_t id;
318
319 /* Specifies the length of complex types.
320 *
321 * For Workgroup pointers, this is the size of the referenced type.
322 */
323 unsigned length;
324
325 /* for arrays, matrices and pointers, the array stride */
326 unsigned stride;
327
328 /* Access qualifiers */
329 enum gl_access_qualifier access;
330
331 union {
332 /* Members for scalar, vector, and array-like types */
333 struct {
334 /* for arrays, the vtn_type for the elements of the array */
335 struct vtn_type *array_element;
336
337 /* for matrices, whether the matrix is stored row-major */
338 bool row_major:1;
339
340 /* Whether this type, or a parent type, has been decorated as a
341 * builtin
342 */
343 bool is_builtin:1;
344
345 /* Which built-in to use */
346 SpvBuiltIn builtin;
347 };
348
349 /* Members for struct types */
350 struct {
351 /* for structures, the vtn_type for each member */
352 struct vtn_type **members;
353
354 /* for structs, the offset of each member */
355 unsigned *offsets;
356
357 /* for structs, whether it was decorated as a "non-SSBO-like" block */
358 bool block:1;
359
360 /* for structs, whether it was decorated as an "SSBO-like" block */
361 bool buffer_block:1;
362
363 /* for structs with block == true, whether this is a builtin block
364 * (i.e. a block that contains only builtins).
365 */
366 bool builtin_block:1;
367
368       /* for structs and unions, whether the members are packed, i.e. laid
369        * out with no minimum alignment between them.
370        *
371        * Set by the CPacked and Alignment decorations in kernels.
372 */
373 bool packed:1;
374 };
375
376 /* Members for pointer types */
377 struct {
378       /* For pointers, the vtn_type for the dereferenced type */
379 struct vtn_type *deref;
380
381 /* Storage class for pointers */
382 SpvStorageClass storage_class;
383
384 /* Required alignment for pointers */
385 uint32_t align;
386 };
387
388 /* Members for image types */
389 struct {
390 /* For images, indicates whether it's sampled or storage */
391 bool sampled;
392
393 /* Image format for image_load_store type images */
394 unsigned image_format;
395
396 /* Access qualifier for storage images */
397 SpvAccessQualifier access_qualifier;
398 };
399
400 /* Members for sampled image types */
401 struct {
402 /* For sampled images, the image type */
403 struct vtn_type *image;
404 };
405
406 /* Members for function types */
407 struct {
408 /* For functions, the vtn_type for each parameter */
409 struct vtn_type **params;
410
411 /* Return type for functions */
412 struct vtn_type *return_type;
413 };
414 };
415 };
416
417 bool vtn_type_contains_block(struct vtn_builder *b, struct vtn_type *type);
418
419 bool vtn_types_compatible(struct vtn_builder *b,
420 struct vtn_type *t1, struct vtn_type *t2);
421
422 struct vtn_type *vtn_type_without_array(struct vtn_type *type);
423
424 struct vtn_variable;
425
426 enum vtn_access_mode {
427 vtn_access_mode_id,
428 vtn_access_mode_literal,
429 };
430
431 struct vtn_access_link {
432 enum vtn_access_mode mode;
433 int64_t id;
434 };
435
436 struct vtn_access_chain {
437 uint32_t length;
438
439 /** Whether or not to treat the base pointer as an array. This is only
440 * true if this access chain came from an OpPtrAccessChain.
441 */
442 bool ptr_as_array;
443
444 /* Access qualifiers */
445 enum gl_access_qualifier access;
446
447 /** Struct elements and array offsets.
448 *
449 * This is an array of 1 so that it can conveniently be created on the
450 * stack but the real length is given by the length field.
451 */
452 struct vtn_access_link link[1];
453 };
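/* A hedged sketch of how a chain with a run-time number of links is usually
 * created: over-allocate so that `length` links fit in the trailing link[1]
 * array.  (The helper below is illustrative, not a function from this file.)
 *
 *    static struct vtn_access_chain *
 *    example_create_chain(struct vtn_builder *b, uint32_t length)
 *    {
 *       size_t size = sizeof(struct vtn_access_chain) +
 *                     (length > 0 ? length - 1 : 0) *
 *                        sizeof(struct vtn_access_link);
 *       struct vtn_access_chain *chain = rzalloc_size(b, size);
 *       chain->length = length;
 *       return chain;
 *    }
 */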
454
455 enum vtn_variable_mode {
456 vtn_variable_mode_function,
457 vtn_variable_mode_private,
458 vtn_variable_mode_uniform,
459 vtn_variable_mode_ubo,
460 vtn_variable_mode_ssbo,
461 vtn_variable_mode_phys_ssbo,
462 vtn_variable_mode_push_constant,
463 vtn_variable_mode_workgroup,
464 vtn_variable_mode_cross_workgroup,
465 vtn_variable_mode_input,
466 vtn_variable_mode_output,
467 vtn_variable_mode_image,
468 };
469
470 struct vtn_pointer {
471 /** The variable mode for the referenced data */
472 enum vtn_variable_mode mode;
473
474 /** The dereferenced type of this pointer */
475 struct vtn_type *type;
476
477 /** The pointer type of this pointer
478 *
479 * This may be NULL for some temporary pointers constructed as part of a
480 * large load, store, or copy. It MUST be valid for all pointers which are
481 * stored as SPIR-V SSA values.
482 */
483 struct vtn_type *ptr_type;
484
485 /** The referenced variable, if known
486 *
487 * This field may be NULL if the pointer uses a (block_index, offset) pair
488 * instead of an access chain or if the access chain starts at a deref.
489 */
490 struct vtn_variable *var;
491
492 /** The NIR deref corresponding to this pointer */
493 nir_deref_instr *deref;
494
495 /** A (block_index, offset) pair representing a UBO or SSBO position. */
496 struct nir_ssa_def *block_index;
497 struct nir_ssa_def *offset;
498
499 /* Access qualifiers */
500 enum gl_access_qualifier access;
501 };
502
503 bool vtn_mode_uses_ssa_offset(struct vtn_builder *b,
504 enum vtn_variable_mode mode);
505
506 static inline bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
507 struct vtn_pointer *ptr)
508 {
509 return vtn_mode_uses_ssa_offset(b, ptr->mode);
510 }
511
512
513 struct vtn_variable {
514 enum vtn_variable_mode mode;
515
516 struct vtn_type *type;
517
518 unsigned descriptor_set;
519 unsigned binding;
520 bool explicit_binding;
521 unsigned offset;
522 unsigned input_attachment_index;
523 bool patch;
524
525 nir_variable *var;
526
527    /* If the variable is a struct with a location set on it, the location is
528     * stored here and used to calculate locations for members that don't
529     * have their own explicit location.
530 */
531 int base_location;
532
533 int shared_location;
534
535 /**
536     * Some early releases of GLSLang implemented all function
537 * calls by making copies of all parameters into temporary variables and
538 * passing those variables into the function. It even did so for samplers
539 * and images which violates the SPIR-V spec. Unfortunately, two games
540 * (Talos Principle and Doom) shipped with this old version of GLSLang and
541 * also happen to pass samplers into functions. Talos Principle received
542 * an update fairly shortly after release with an updated GLSLang. Doom,
543 * on the other hand, has never received an update so we need to work
544 * around this GLSLang issue in SPIR-V -> NIR. Hopefully, we can drop this
545 * hack at some point in the future.
546 */
547 struct vtn_pointer *copy_prop_sampler;
548
549 /* Access qualifiers. */
550 enum gl_access_qualifier access;
551 };
552
553 struct vtn_image_pointer {
554 struct vtn_pointer *image;
555 nir_ssa_def *coord;
556 nir_ssa_def *sample;
557 nir_ssa_def *lod;
558 };
559
560 struct vtn_sampled_image {
561 struct vtn_pointer *image; /* Image or array of images */
562 struct vtn_pointer *sampler; /* Sampler */
563 };
564
565 struct vtn_value {
566 enum vtn_value_type value_type;
567 const char *name;
568 struct vtn_decoration *decoration;
569 struct vtn_type *type;
570 union {
571 void *ptr;
572 char *str;
573 nir_constant *constant;
574 struct vtn_pointer *pointer;
575 struct vtn_image_pointer *image;
576 struct vtn_sampled_image *sampled_image;
577 struct vtn_function *func;
578 struct vtn_block *block;
579 struct vtn_ssa_value *ssa;
580 vtn_instruction_handler ext_handler;
581 };
582 };
583
584 #define VTN_DEC_DECORATION -1
585 #define VTN_DEC_EXECUTION_MODE -2
586 #define VTN_DEC_STRUCT_MEMBER0 0
587
588 struct vtn_decoration {
589 struct vtn_decoration *next;
590
591 /* Specifies how to apply this decoration. Negative values represent a
592 * decoration or execution mode. (See the VTN_DEC_ #defines above.)
593 * Non-negative values specify that it applies to a structure member.
594 */
595 int scope;
596
597 const uint32_t *operands;
598 struct vtn_value *group;
599
600 union {
601 SpvDecoration decoration;
602 SpvExecutionMode exec_mode;
603 };
604 };
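/* Illustrative summary of the scope encoding above: in a
 * vtn_decoration_foreach_cb, a scope of VTN_DEC_DECORATION or
 * VTN_DEC_EXECUTION_MODE means the decoration applies to the value as a
 * whole, while a non-negative scope names a struct member, i.e.
 *
 *    int member = dec->scope - VTN_DEC_STRUCT_MEMBER0;   // member index
 */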
605
606 struct vtn_builder {
607 nir_builder nb;
608
609 /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
610 jmp_buf fail_jump;
611
612 const uint32_t *spirv;
613 size_t spirv_word_count;
614
615 nir_shader *shader;
616 struct spirv_to_nir_options *options;
617 struct vtn_block *block;
618
619 /* Current offset, file, line, and column. Useful for debugging. Set
620 * automatically by vtn_foreach_instruction.
621 */
622 size_t spirv_offset;
623 char *file;
624 int line, col;
625
626 /*
627 * In SPIR-V, constants are global, whereas in NIR, the load_const
628 * instruction we use is per-function. So while we parse each function, we
629     * keep a hash table of constants we've resolved to SSA values so
630 * far, and we lazily resolve them when we see them used in a function.
631 */
632 struct hash_table *const_table;
633
634 /*
635 * Map from phi instructions (pointer to the start of the instruction)
636 * to the variable corresponding to it.
637 */
638 struct hash_table *phi_table;
639
640 unsigned num_specializations;
641 struct nir_spirv_specialization *specializations;
642
643 unsigned value_id_bound;
644 struct vtn_value *values;
645
646 /* True if we should watch out for GLSLang issue #179 */
647 bool wa_glslang_179;
648
649 /* True if we need to fix up CS OpControlBarrier */
650 bool wa_glslang_cs_barrier;
651
652 gl_shader_stage entry_point_stage;
653 const char *entry_point_name;
654 struct vtn_value *entry_point;
655 struct vtn_value *workgroup_size_builtin;
656 bool variable_pointers;
657
658 struct vtn_function *func;
659 struct list_head functions;
660
661 /* Current function parameter index */
662 unsigned func_param_idx;
663
664 bool has_loop_continue;
665
666 /* false by default, set to true by the ContractionOff execution mode */
667 bool exact;
668
669    /* True when a physical memory model is chosen */
670 bool physical_ptrs;
671 };
672
673 nir_ssa_def *
674 vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
675 struct vtn_pointer *
676 vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
677 struct vtn_type *ptr_type);
678
679 static inline struct vtn_value *
680 vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
681 {
682 vtn_fail_if(value_id >= b->value_id_bound,
683 "SPIR-V id %u is out-of-bounds", value_id);
684 return &b->values[value_id];
685 }
686
687 /* Rather than calling this function directly, consider using
688  * vtn_push_ssa/vtn_push_value_pointer so that decorations are applied
689  * by the common code.
690 */
691 static inline struct vtn_value *
692 vtn_push_value(struct vtn_builder *b, uint32_t value_id,
693 enum vtn_value_type value_type)
694 {
695 struct vtn_value *val = vtn_untyped_value(b, value_id);
696
697 vtn_fail_if(val->value_type != vtn_value_type_invalid,
698 "SPIR-V id %u has already been written by another instruction",
699 value_id);
700
701 val->value_type = value_type;
702
703 return &b->values[value_id];
704 }
705
706 static inline struct vtn_value *
707 vtn_value(struct vtn_builder *b, uint32_t value_id,
708 enum vtn_value_type value_type)
709 {
710 struct vtn_value *val = vtn_untyped_value(b, value_id);
711 vtn_fail_if(val->value_type != value_type,
712 "SPIR-V id %u is the wrong kind of value", value_id);
713 return val;
714 }
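/* Typical lookup/push pattern in an instruction handler (a sketch, not code
 * from this file): operands are fetched with vtn_value()/vtn_ssa_value() and
 * the result id is written exactly once with one of the push helpers.
 *
 *    struct vtn_type *type = vtn_value(b, w[1], vtn_value_type_type)->type;
 *    struct vtn_ssa_value *src = vtn_ssa_value(b, w[3]);
 *    struct vtn_ssa_value *dest = vtn_create_ssa_value(b, type->type);
 *    ...
 *    vtn_push_ssa(b, w[2], type, dest);
 */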
715
716 bool
717 vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
718 const uint32_t *w, unsigned count);
719
720 static inline uint64_t
721 vtn_constant_uint(struct vtn_builder *b, uint32_t value_id)
722 {
723 struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);
724
725 vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
726 !glsl_type_is_integer(val->type->type),
727 "Expected id %u to be an integer constant", value_id);
728
729 switch (glsl_get_bit_size(val->type->type)) {
730 case 8: return val->constant->values[0].u8;
731 case 16: return val->constant->values[0].u16;
732 case 32: return val->constant->values[0].u32;
733 case 64: return val->constant->values[0].u64;
734 default: unreachable("Invalid bit size");
735 }
736 }
737
738 static inline int64_t
739 vtn_constant_int(struct vtn_builder *b, uint32_t value_id)
740 {
741 struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);
742
743 vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
744 !glsl_type_is_integer(val->type->type),
745 "Expected id %u to be an integer constant", value_id);
746
747 switch (glsl_get_bit_size(val->type->type)) {
748 case 8: return val->constant->values[0].i8;
749 case 16: return val->constant->values[0].i16;
750 case 32: return val->constant->values[0].i32;
751 case 64: return val->constant->values[0].i64;
752 default: unreachable("Invalid bit size");
753 }
754 }
755
756 static inline enum gl_access_qualifier vtn_value_access(struct vtn_value *value)
757 {
758 switch (value->value_type) {
759 case vtn_value_type_invalid:
760 case vtn_value_type_undef:
761 case vtn_value_type_string:
762 case vtn_value_type_decoration_group:
763 case vtn_value_type_constant:
764 case vtn_value_type_function:
765 case vtn_value_type_block:
766 case vtn_value_type_extension:
767 return 0;
768 case vtn_value_type_type:
769 return value->type->access;
770 case vtn_value_type_pointer:
771 return value->pointer->access;
772 case vtn_value_type_ssa:
773 return value->ssa->access;
774 case vtn_value_type_image_pointer:
775 return value->image->image->access;
776 case vtn_value_type_sampled_image:
777 return value->sampled_image->image->access |
778 value->sampled_image->sampler->access;
779 }
780
781 unreachable("invalid type");
782 }
783
784 struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);
785
786 struct vtn_value *vtn_push_value_pointer(struct vtn_builder *b,
787 uint32_t value_id,
788 struct vtn_pointer *ptr);
789
790 struct vtn_value *vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
791 struct vtn_type *type, struct vtn_ssa_value *ssa);
792
793 struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
794 const struct glsl_type *type);
795
796 struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
797 struct vtn_ssa_value *src);
798
799 nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
800 unsigned index);
801 nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
802 nir_ssa_def *index);
803 nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
804 nir_ssa_def *insert, unsigned index);
805 nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
806 nir_ssa_def *insert, nir_ssa_def *index);
807
808 nir_deref_instr *vtn_nir_deref(struct vtn_builder *b, uint32_t id);
809
810 struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
811 struct vtn_variable *var,
812 struct vtn_type *ptr_type);
813
814 nir_deref_instr *vtn_pointer_to_deref(struct vtn_builder *b,
815 struct vtn_pointer *ptr);
816 nir_ssa_def *
817 vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
818 nir_ssa_def **index_out);
819
820 struct vtn_ssa_value *
821 vtn_local_load(struct vtn_builder *b, nir_deref_instr *src,
822 enum gl_access_qualifier access);
823
824 void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
825 nir_deref_instr *dest,
826 enum gl_access_qualifier access);
827
828 struct vtn_ssa_value *
829 vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);
830
831 void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
832 struct vtn_pointer *dest);
833
834 void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
835 const uint32_t *w, unsigned count);
836
837
838 typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
839 struct vtn_value *,
840 int member,
841 const struct vtn_decoration *,
842 void *);
843
844 void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
845 vtn_decoration_foreach_cb cb, void *data);
846
847 typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
848 struct vtn_value *,
849 const struct vtn_decoration *,
850 void *);
851
852 void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
853 vtn_execution_mode_foreach_cb cb, void *data);
854
855 nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
856 SpvOp opcode, bool *swap,
857 unsigned src_bit_size, unsigned dst_bit_size);
858
859 void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
860 const uint32_t *w, unsigned count);
861
862 void vtn_handle_bitcast(struct vtn_builder *b, const uint32_t *w,
863 unsigned count);
864
865 void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
866 const uint32_t *w, unsigned count);
867
868 bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
869 const uint32_t *words, unsigned count);
870
871 bool vtn_handle_opencl_instruction(struct vtn_builder *b, SpvOp ext_opcode,
872 const uint32_t *words, unsigned count);
873
874 struct vtn_builder* vtn_create_builder(const uint32_t *words, size_t word_count,
875 gl_shader_stage stage, const char *entry_point_name,
876 const struct spirv_to_nir_options *options);
877
878 void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
879 unsigned count);
880
881 void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
882 const uint32_t *w, unsigned count);
883
884 enum vtn_variable_mode vtn_storage_class_to_mode(struct vtn_builder *b,
885 SpvStorageClass class,
886 struct vtn_type *interface_type,
887 nir_variable_mode *nir_mode_out);
888
889 nir_address_format vtn_mode_to_address_format(struct vtn_builder *b,
890 enum vtn_variable_mode);
891
892 static inline uint32_t
893 vtn_align_u32(uint32_t v, uint32_t a)
894 {
895 assert(a != 0 && a == (a & -((int32_t) a)));
896 return (v + a - 1) & ~(a - 1);
897 }
898
899 static inline uint64_t
900 vtn_u64_literal(const uint32_t *w)
901 {
902 return (uint64_t)w[1] << 32 | w[0];
903 }
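/* SPIR-V encodes 64-bit literals low-order word first, so w[0] holds the low
 * 32 bits and w[1] the high 32 bits.  For example (illustrative values):
 *
 *    uint32_t w[] = { 0x89abcdef, 0x01234567 };
 *    assert(vtn_u64_literal(w) == 0x0123456789abcdefull);
 */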
904
905 bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
906 const uint32_t *words, unsigned count);
907
908 bool vtn_handle_amd_shader_ballot_instruction(struct vtn_builder *b, SpvOp ext_opcode,
909 const uint32_t *w, unsigned count);
910
911 bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
912 const uint32_t *words, unsigned count);
913
914 bool vtn_handle_amd_shader_explicit_vertex_parameter_instruction(struct vtn_builder *b,
915 SpvOp ext_opcode,
916 const uint32_t *words,
917 unsigned count);
918
919 SpvMemorySemanticsMask vtn_storage_class_to_memory_semantics(SpvStorageClass sc);
920
921 void vtn_emit_memory_barrier(struct vtn_builder *b, SpvScope scope,
922 SpvMemorySemanticsMask semantics);
923
924 #endif /* _VTN_PRIVATE_H_ */