spirv: wrap push ssa/pointer values
src/compiler/spirv/vtn_private.h (mesa.git)
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"

struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)

void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
              const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp. This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder. This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
NORETURN void
_vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
          const char *fmt, ...) PRINTFLIKE(4, 5);
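
/* A minimal sketch of the setjmp protocol described above, loosely modeled
 * on what spirv_to_nir() does (illustrative only, not the exact
 * implementation; only fail_jump and vtn_create_builder come from this
 * header):
 *
 *    struct vtn_builder *b = vtn_create_builder(words, word_count, ...);
 *    if (b == NULL)
 *       return NULL;
 *
 *    if (setjmp(b->fail_jump)) {
 *       ralloc_free(b);   // heap allocations are ralloc-parented to b
 *       return NULL;
 *    }
 *
 *    ... parse; any vtn_fail() longjmps back to the setjmp above ...
 */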

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)

#define _vtn_fail_with(t, msg, v) \
   vtn_fail("%s: %s (%u)\n", msg, spirv_ ## t ## _to_string(v), v)

#define vtn_fail_with_decoration(msg, v) _vtn_fail_with(decoration, msg, v)
#define vtn_fail_with_opcode(msg, v) _vtn_fail_with(op, msg, v)
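
/* Illustrative usage (a sketch; `opcode`, `w`, and `count` stand for the
 * usual instruction-handler arguments and are not defined here):
 *
 *    vtn_fail_if(count < 4, "Expected at least 4 operands, got %u", count);
 *    ...
 *    default:
 *       vtn_fail_with_opcode("Unhandled opcode", opcode);
 */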

/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code. Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)

enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};

enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};

enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_switch,
};

struct vtn_cf_node {
   struct list_head link;
   enum vtn_cf_node_type type;
};

struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop. This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   SpvLoopControlMask control;
};

struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   SpvSelectionControlMask control;
};

struct vtn_case {
   struct list_head link;

   struct list_head body;

   /* The block that starts this case */
   struct vtn_block *start_block;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};

struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;
};

struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};

struct vtn_function {
   struct exec_node node;

   struct vtn_type *type;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);

void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);
void vtn_handle_function_call(struct vtn_builder *b, SpvOp opcode,
                              const uint32_t *w, unsigned count);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);

struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value. The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};

enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;

         /* for structs and unions, whether the members are tightly packed
          * with no implicit padding between them.
          *
          * Set by CPacked and Alignment Decorations in kernels.
          */
         bool packed:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type for dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         uint32_t align;
      };

      /* Members for image types */
      struct {
         /* For images, indicates whether it's sampled or storage */
         bool sampled;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};

bool vtn_type_contains_block(struct vtn_builder *b, struct vtn_type *type);

bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);

struct vtn_type *vtn_type_without_array(struct vtn_type *type);

struct vtn_variable;

enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   int64_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array. This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
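
/* Illustrative allocation sketch for the trailing-array trick above (the
 * code below is hypothetical, not a vtn helper): a chain with `n` links is
 * over-allocated so that link[] extends past the declared array of 1:
 *
 *    struct vtn_access_chain *chain =
 *       ralloc_size(b, sizeof(*chain) + (n - 1) * sizeof(chain->link[0]));
 *    chain->length = n;
 */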

enum vtn_variable_mode {
   vtn_variable_mode_function,
   vtn_variable_mode_private,
   vtn_variable_mode_uniform,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_phys_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_cross_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
   vtn_variable_mode_image,
};

struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy. It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain or if the access chain starts at a deref.
    */
   struct vtn_variable *var;

   /** The NIR deref corresponding to this pointer */
   nir_deref_instr *deref;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;

   /* Access qualifiers */
   enum gl_access_qualifier access;
};

bool vtn_mode_uses_ssa_offset(struct vtn_builder *b,
                              enum vtn_variable_mode mode);

static inline bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                                               struct vtn_pointer *ptr)
{
   return vtn_mode_uses_ssa_offset(b, ptr->mode);
}
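
/* Sketch of the typical dispatch on a pointer's representation (simplified;
 * the real load/store paths handle more cases):
 *
 *    if (vtn_pointer_uses_ssa_offset(b, ptr)) {
 *       nir_ssa_def *index;
 *       nir_ssa_def *offset = vtn_pointer_to_offset(b, ptr, &index);
 *       ... access the data in terms of (block_index, offset) ...
 *    } else {
 *       nir_deref_instr *deref = vtn_pointer_to_deref(b, ptr);
 *       ... access the data through the NIR deref chain ...
 *    }
 */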

struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   bool explicit_binding;
   unsigned offset;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;

   /* If the variable is a struct with a location set on it then this will be
    * stored here. This will be used to calculate locations for members that
    * don't have their own explicit location.
    */
   int base_location;

   int shared_location;

   /**
    * In some early released versions of GLSLang, it implemented all function
    * calls by making copies of all parameters into temporary variables and
    * passing those variables into the function. It even did so for samplers
    * and images which violates the SPIR-V spec. Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions. Talos Principle received
    * an update fairly shortly after release with an updated GLSLang. Doom,
    * on the other hand, has never received an update so we need to work
    * around this GLSLang issue in SPIR-V -> NIR. Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;

   /* Access qualifiers. */
   enum gl_access_qualifier access;
};

struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
};

struct vtn_sampled_image {
   struct vtn_type *type;
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};

struct vtn_value {
   enum vtn_value_type value_type;
   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      void *ptr;
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};

#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration. Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *operands;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};

struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column. Useful for debugging. Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function. So while we parse each function, we
    * keep a hash table of the constants we've resolved to SSA values so far,
    * and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   /* True if we should watch out for GLSLang issue #179 */
   bool wa_glslang_179;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   struct vtn_value *workgroup_size_builtin;
   bool variable_pointers;

   struct vtn_function *func;
   struct exec_list functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;

   /* false by default, set to true by the ContractionOff execution mode */
   bool exact;

   /* Set when a physical memory model is chosen */
   bool physical_ptrs;
};

nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);

static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}

/* Prefer vtn_push_ssa/vtn_push_value_pointer over calling this function
 * directly, so that decorations are applied by common code.
 */
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;

   return &b->values[value_id];
}
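
/* Typical use in an instruction handler (sketch; assumes the common SPIR-V
 * word layout where w[1] is the result type id and w[2] is the result id):
 *
 *    struct vtn_value *val = vtn_push_value(b, w[2], vtn_value_type_ssa);
 *    val->type = vtn_value(b, w[1], vtn_value_type_type)->type;
 *    val->ssa = ...;
 */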

static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}

bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

static inline uint64_t
vtn_constant_uint(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].u8;
   case 16: return val->constant->values[0].u16;
   case 32: return val->constant->values[0].u32;
   case 64: return val->constant->values[0].u64;
   default: unreachable("Invalid bit size");
   }
}

static inline int64_t
vtn_constant_int(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].i8;
   case 16: return val->constant->values[0].i16;
   case 32: return val->constant->values[0].i32;
   case 64: return val->constant->values[0].i64;
   default: unreachable("Invalid bit size");
   }
}
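
/* Example (sketch): reading the literal length of an OpTypeArray, whose
 * length operand w[3] is the id of an integer constant:
 *
 *    unsigned array_len = vtn_constant_uint(b, w[3]);
 */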

struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);

struct vtn_value *vtn_push_value_pointer(struct vtn_builder *b,
                                         uint32_t value_id,
                                         struct vtn_pointer *ptr);

struct vtn_value *vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
                               struct vtn_type *type, struct vtn_ssa_value *ssa);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
                                unsigned index);
nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                        nir_ssa_def *index);
nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
                               nir_ssa_def *insert, unsigned index);
nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                       nir_ssa_def *insert, nir_ssa_def *index);

nir_deref_instr *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
                                             struct vtn_variable *var,
                                             struct vtn_type *ptr_type);

nir_deref_instr *vtn_pointer_to_deref(struct vtn_builder *b,
                                      struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out);

struct vtn_ssa_value *
vtn_local_load(struct vtn_builder *b, nir_deref_instr *src,
               enum gl_access_qualifier access);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_instr *dest,
                     enum gl_access_qualifier access);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);


typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);

typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);

nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size, unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_bitcast(struct vtn_builder *b, const uint32_t *w,
                        unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);

bool vtn_handle_opencl_instruction(struct vtn_builder *b, uint32_t ext_opcode,
                                   const uint32_t *words, unsigned count);

struct vtn_builder* vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage,
                                       const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);

void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);

enum vtn_variable_mode vtn_storage_class_to_mode(struct vtn_builder *b,
                                                 SpvStorageClass class,
                                                 struct vtn_type *interface_type,
                                                 nir_variable_mode *nir_mode_out);

nir_address_format vtn_mode_to_address_format(struct vtn_builder *b,
                                              enum vtn_variable_mode);

static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}
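
/* Worked example: vtn_align_u32(13, 8) == 16 and vtn_align_u32(16, 8) == 16.
 * The assert verifies that `a` is a nonzero power of two.
 */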

static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}
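
/* Example: SPIR-V stores 64-bit literals low-order word first, so
 * w[] = { 0x00000001, 0x00000002 } decodes to 0x0000000200000001.
 */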

bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                           const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_ballot_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                              const uint32_t *w, unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                                      const uint32_t *words, unsigned count);
#endif /* _VTN_PRIVATE_H_ */