/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"

struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)

void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
              const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp. This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder. This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
NORETURN void
_vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
          const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)

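/* A minimal sketch of how vtn_fail_if is typically used inside an
 * instruction handler; the handler name and the word-count check are
 * assumptions made up for illustration, not part of this header.
 *
 *    static bool
 *    vtn_handle_example(struct vtn_builder *b, SpvOp opcode,
 *                       const uint32_t *w, unsigned count)
 *    {
 *       vtn_fail_if(count < 3, "OpExample requires 3 words, got %u", count);
 *       // ... translate the instruction ...
 *       return true;
 *    }
 */
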
#define _vtn_fail_with(t, msg, v) \
   vtn_fail("%s: %s (%u)\n", msg, spirv_ ## t ## _to_string(v), v)

#define vtn_fail_with_decoration(msg, v) _vtn_fail_with(decoration, msg, v)
#define vtn_fail_with_opcode(msg, v)     _vtn_fail_with(op, msg, v)

/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code. Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)

enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};

enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_if_merge,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_loop_back_edge,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};

enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_case,
   vtn_cf_node_type_switch,
   vtn_cf_node_type_function,
};

struct vtn_cf_node {
   struct list_head link;
   struct vtn_cf_node *parent;
   enum vtn_cf_node_type type;
};

struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop. This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   struct vtn_block *header_block;
   struct vtn_block *cont_block;
   struct vtn_block *break_block;

   SpvLoopControlMask control;
};

struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   struct vtn_block *merge_block;

   SpvSelectionControlMask control;
};

struct vtn_case {
   struct vtn_cf_node node;

   enum vtn_branch_type type;
   struct list_head body;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};

struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;

   struct vtn_block *break_block;
};

struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /* The CF node for which this is a merge target
    *
    * The SPIR-V spec requires that any given block can be the merge target
    * for at most one merge instruction. If this block is a merge target,
    * this points back to the block containing that merge instruction.
    */
   struct vtn_cf_node *merge_cf_node;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};

struct vtn_function {
   struct vtn_cf_node node;

   struct vtn_type *type;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

#define VTN_DECL_CF_NODE_CAST(_type)               \
static inline struct vtn_##_type *                 \
vtn_cf_node_as_##_type(struct vtn_cf_node *node)   \
{                                                  \
   assert(node->type == vtn_cf_node_type_##_type); \
   return (struct vtn_##_type *)node;              \
}

VTN_DECL_CF_NODE_CAST(block)
VTN_DECL_CF_NODE_CAST(loop)
VTN_DECL_CF_NODE_CAST(if)
VTN_DECL_CF_NODE_CAST(case)
VTN_DECL_CF_NODE_CAST(switch)
VTN_DECL_CF_NODE_CAST(function)

#define vtn_foreach_cf_node(node, cf_list) \
   list_for_each_entry(struct vtn_cf_node, node, cf_list, link)

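/* A minimal sketch of walking a CF list with vtn_foreach_cf_node and
 * downcasting each node by its type tag; handle_block/handle_loop are
 * hypothetical helpers used only for illustration.
 *
 *    vtn_foreach_cf_node(node, &func->body) {
 *       switch (node->type) {
 *       case vtn_cf_node_type_block:
 *          handle_block(b, vtn_cf_node_as_block(node));
 *          break;
 *       case vtn_cf_node_type_loop:
 *          handle_loop(b, vtn_cf_node_as_loop(node));
 *          break;
 *       default:
 *          break;
 *       }
 *    }
 */
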
typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);

void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);
void vtn_handle_function_call(struct vtn_builder *b, SpvOp opcode,
                              const uint32_t *w, unsigned count);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);

struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value. The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};

enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;

         /* for structs and unions it specifies the minimum alignment of the
          * members. 0 means packed.
          *
          * Set by CPacked and Alignment Decorations in kernels.
          */
         bool packed:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type of the dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         uint32_t align;
      };

      /* Members for image types */
      struct {
         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};

bool vtn_type_contains_block(struct vtn_builder *b, struct vtn_type *type);

bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);

struct vtn_type *vtn_type_without_array(struct vtn_type *type);

struct vtn_variable;

enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   int64_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array. This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /* Access qualifiers */
   enum gl_access_qualifier access;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};

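/* Because link[] is declared with length 1, a chain with N links is
 * allocated with trailing space for the extra entries. A sketch, assuming
 * the allocation is ralloc'd against the builder as described in the
 * vtn_fail comment above:
 *
 *    struct vtn_access_chain *chain =
 *       ralloc_size(b, sizeof(*chain) +
 *                      (n_links - 1) * sizeof(chain->link[0]));
 *    chain->length = n_links;
 */
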
enum vtn_variable_mode {
   vtn_variable_mode_function,
   vtn_variable_mode_private,
   vtn_variable_mode_uniform,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_phys_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_cross_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
   vtn_variable_mode_image,
};

struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy. It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain or if the access chain starts at a deref.
    */
   struct vtn_variable *var;

   /** The NIR deref corresponding to this pointer */
   nir_deref_instr *deref;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;

   /* Access qualifiers */
   enum gl_access_qualifier access;
};

bool vtn_mode_uses_ssa_offset(struct vtn_builder *b,
                              enum vtn_variable_mode mode);

static inline bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                                               struct vtn_pointer *ptr)
{
   return vtn_mode_uses_ssa_offset(b, ptr->mode);
}

struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   bool explicit_binding;
   unsigned offset;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;

   /* If the variable is a struct with a location set on it then this will be
    * stored here. This will be used to calculate locations for members that
    * don’t have their own explicit location.
    */
   int base_location;

   /**
    * In some early released versions of GLSLang, it implemented all function
    * calls by making copies of all parameters into temporary variables and
    * passing those variables into the function. It even did so for samplers
    * and images which violates the SPIR-V spec. Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions. Talos Principle received
    * an update fairly shortly after release with an updated GLSLang. Doom,
    * on the other hand, has never received an update so we need to work
    * around this GLSLang issue in SPIR-V -> NIR. Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;

   /* Access qualifiers. */
   enum gl_access_qualifier access;
};

struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
   nir_ssa_def *lod;
};

struct vtn_sampled_image {
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};

struct vtn_value {
   enum vtn_value_type value_type;
   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};

#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration. Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *operands;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};

struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column. Useful for debugging. Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function. So while we parse each function, we
    * keep a hash table of constants we've resolved to nir_ssa_value's so
    * far, and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   /* True if we should watch out for GLSLang issue #179 */
   bool wa_glslang_179;

   /* True if we need to fix up CS OpControlBarrier */
   bool wa_glslang_cs_barrier;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   struct vtn_value *workgroup_size_builtin;
   bool variable_pointers;

   struct vtn_function *func;
   struct list_head functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;

   /* false by default, set to true by the ContractionOff execution mode */
   bool exact;

   /* when a physical memory model is chosen */
   bool physical_ptrs;

   /* memory model specified by OpMemoryModel */
   unsigned mem_model;
};

nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);

static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}

/* Consider not using this function directly and instead use
 * vtn_push_ssa/vtn_push_pointer so that applying decorations is handled by
 * common code.
 */
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(value_type == vtn_value_type_ssa,
               "Do not call vtn_push_value for value_type_ssa. Use "
               "vtn_push_ssa_value instead.");

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;

   return &b->values[value_id];
}

static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}

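/* A minimal sketch of the typed-lookup pattern: vtn_value both fetches the
 * id and validates its kind, so callers can dereference the matching union
 * member directly. The operand index w[3] is a made-up example.
 *
 *    struct vtn_value *val = vtn_value(b, w[3], vtn_value_type_constant);
 *    nir_constant *c = val->constant;
 */
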
bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

static inline uint64_t
vtn_constant_uint(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].u8;
   case 16: return val->constant->values[0].u16;
   case 32: return val->constant->values[0].u32;
   case 64: return val->constant->values[0].u64;
   default: unreachable("Invalid bit size");
   }
}

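/* A minimal sketch of using vtn_constant_uint to read an integer-constant
 * operand; treating w[3] as an array-length id is an assumption made up for
 * illustration.
 *
 *    unsigned array_len = vtn_constant_uint(b, w[3]);
 */
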
static inline int64_t
vtn_constant_int(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_value(b, value_id, vtn_value_type_constant);

   vtn_fail_if(val->type->base_type != vtn_base_type_scalar ||
               !glsl_type_is_integer(val->type->type),
               "Expected id %u to be an integer constant", value_id);

   switch (glsl_get_bit_size(val->type->type)) {
   case 8:  return val->constant->values[0].i8;
   case 16: return val->constant->values[0].i16;
   case 32: return val->constant->values[0].i32;
   case 64: return val->constant->values[0].i64;
   default: unreachable("Invalid bit size");
   }
}

static inline struct vtn_type *
vtn_get_value_type(struct vtn_builder *b, uint32_t value_id)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->type == NULL, "Value %u does not have a type", value_id);
   return val->type;
}

static inline struct vtn_type *
vtn_get_type(struct vtn_builder *b, uint32_t value_id)
{
   return vtn_value(b, value_id, vtn_value_type_type)->type;
}

struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);
struct vtn_value *vtn_push_ssa_value(struct vtn_builder *b, uint32_t value_id,
                                     struct vtn_ssa_value *ssa);

nir_ssa_def *vtn_get_nir_ssa(struct vtn_builder *b, uint32_t value_id);
struct vtn_value *vtn_push_nir_ssa(struct vtn_builder *b, uint32_t value_id,
                                   nir_ssa_def *def);

struct vtn_value *vtn_push_pointer(struct vtn_builder *b,
                                   uint32_t value_id,
                                   struct vtn_pointer *ptr);

void
vtn_copy_value(struct vtn_builder *b, uint32_t src_value_id,
               uint32_t dst_value_id);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_deref_instr *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

nir_deref_instr *vtn_pointer_to_deref(struct vtn_builder *b,
                                      struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out);

struct vtn_ssa_value *
vtn_local_load(struct vtn_builder *b, nir_deref_instr *src,
               enum gl_access_qualifier access);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_instr *dest,
                     enum gl_access_qualifier access);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);

typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);

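/* A minimal sketch of a decoration callback; the handling shown (recording
 * a Location decoration and ignoring member decorations) is made up for
 * illustration.
 *
 *    static void
 *    example_decoration_cb(struct vtn_builder *b, struct vtn_value *val,
 *                          int member, const struct vtn_decoration *dec,
 *                          void *data)
 *    {
 *       if (member >= 0)
 *          return; // only handle decorations on the value itself
 *       if (dec->decoration == SpvDecorationLocation)
 *          *(unsigned *)data = dec->operands[0];
 *    }
 *
 *    unsigned location = 0;
 *    vtn_foreach_decoration(b, val, example_decoration_cb, &location);
 */
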
typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);

nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size,
                                       unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_bitcast(struct vtn_builder *b, const uint32_t *w,
                        unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);

bool vtn_handle_opencl_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                   const uint32_t *words, unsigned count);

struct vtn_builder *vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage,
                                       const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);

void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);

enum vtn_variable_mode vtn_storage_class_to_mode(struct vtn_builder *b,
                                                 SpvStorageClass class,
                                                 struct vtn_type *interface_type,
                                                 nir_variable_mode *nir_mode_out);

nir_address_format vtn_mode_to_address_format(struct vtn_builder *b,
                                              enum vtn_variable_mode);

static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}

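/* Illustrative note: the assert requires the alignment to be a nonzero
 * power of two (for which a == (a & -a)), so e.g. vtn_align_u32(13, 8)
 * yields 16 while vtn_align_u32(16, 8) stays 16.
 */
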
static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}

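/* Illustrative note: SPIR-V encodes 64-bit literals low word first, so for
 * the value 0x0123456789abcdef, w[0] holds 0x89abcdef and w[1] holds
 * 0x01234567; vtn_u64_literal(w) reassembles the full 64-bit constant.
 */
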
bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                           const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_ballot_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                              const uint32_t *w, unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                                      const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_explicit_vertex_parameter_instruction(struct vtn_builder *b,
                                                                 SpvOp ext_opcode,
                                                                 const uint32_t *words,
                                                                 unsigned count);

SpvMemorySemanticsMask vtn_storage_class_to_memory_semantics(SpvStorageClass sc);

void vtn_emit_memory_barrier(struct vtn_builder *b, SpvScope scope,
                             SpvMemorySemanticsMask semantics);

#endif /* _VTN_PRIVATE_H_ */