/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"
struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)

void _vtn_err(struct vtn_builder *b, const char *file, unsigned line,
              const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_err(...) _vtn_err(b, __FILE__, __LINE__, __VA_ARGS__)
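
/* Example (illustrative): the vtn_info/vtn_warn/vtn_err macros assume a
 * variable named `b` (the vtn_builder) is in scope and take printf-style
 * arguments, e.g.
 *
 *    vtn_warn("ignoring unknown decoration: %u", dec);
 */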
/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp. This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder.  This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
NORETURN void _vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
                        const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)
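
/* A minimal sketch of the pattern described above, assuming the builder's
 * jmp_buf is the fail_jump field set up by spirv_to_nir() (illustrative,
 * not the actual implementation):
 *
 *    struct vtn_builder *b = vtn_create_builder(words, word_count, ...);
 *    if (setjmp(b->fail_jump)) {
 *       ralloc_free(b);   // every allocation is ralloc'd off of b
 *       return NULL;      // a vtn_fail() somewhere longjmp'd back here
 *    }
 *    ...parse the SPIR-V; any vtn_fail() lands at the setjmp above...
 */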
/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)
/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code.  Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)
enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};
enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};
enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_switch,
};

struct vtn_cf_node {
   struct list_head link;
   enum vtn_cf_node_type type;
};
struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop.  This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   SpvLoopControlMask control;
};
struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   SpvSelectionControlMask control;
};
struct vtn_case {
   struct list_head link;

   struct list_head body;

   /* The block that starts this case */
   struct vtn_block *start_block;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};
struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;
};
struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};
struct vtn_function {
   struct exec_node node;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);
void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);
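
/* Illustrative handler sketch (hypothetical name; real handlers live in
 * spirv_to_nir.c).  By convention, returning false stops the walk early:
 *
 *    static bool
 *    handle_op(struct vtn_builder *b, SpvOp opcode,
 *              const uint32_t *w, unsigned count)
 *    {
 *       switch (opcode) {
 *       default:
 *          return true;   // keep iterating
 *       }
 *    }
 *
 *    vtn_foreach_instruction(b, words, words_end, handle_op);
 */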
struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value.  The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};
enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type for dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         unsigned align;
      };

      /* Members for image types */
      struct {
         /* For images, indicates whether it's sampled or storage */
         bool sampled;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};
bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);
enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   uint32_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array.  This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
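
/* Illustrative sketch (hypothetical; the real construction lives in
 * vtn_variables.c): thanks to the array-of-1 trick above, a chain with room
 * for n links can be made by over-allocating:
 *
 *    struct vtn_access_chain *chain =
 *       ralloc_size(b, sizeof(*chain) + (n - 1) * sizeof(chain->link[0]));
 *    chain->length = n;
 */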
enum vtn_variable_mode {
   vtn_variable_mode_local,
   vtn_variable_mode_global,
   vtn_variable_mode_param,
   vtn_variable_mode_uniform,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_image,
   vtn_variable_mode_sampler,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
};
struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy.  It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain.
    */
   struct vtn_variable *var;

   /** An access chain describing how to get from var to the referenced data
    *
    * This field may be NULL if the pointer references the entire variable or
    * if a (block_index, offset) pair is used instead of an access chain.
    */
   struct vtn_access_chain *chain;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;
};
struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;
   nir_variable **members;

   /**
    * In some early released versions of GLSLang, it implemented all function
    * calls by making copies of all parameters into temporary variables and
    * passing those variables into the function.  It even did so for samplers
    * and images which violates the SPIR-V spec.  Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions.  Talos Principle received
    * an update fairly shortly after release with an updated GLSLang.  Doom,
    * on the other hand, has never received an update so we need to work
    * around this GLSLang issue in SPIR-V -> NIR.  Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;
};
struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
};

struct vtn_sampled_image {
   struct vtn_type *type;
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};
struct vtn_value {
   enum vtn_value_type value_type;
   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      void *ptr;
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};
#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration.  Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *literals;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};
struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   const struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column.  Useful for debugging.  Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function.  So while we parse each function, we
    * keep a hash table of constants we've resolved to nir_ssa_value's so
    * far, and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   bool origin_upper_left;
   bool pixel_center_integer;

   struct vtn_function *func;
   struct exec_list functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;
};
nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);
static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;
   return &b->values[value_id];
}
static inline struct vtn_value *
vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
             struct vtn_type *type, struct vtn_ssa_value *ssa)
{
   struct vtn_value *val;
   if (type->base_type == vtn_base_type_pointer) {
      val = vtn_push_value(b, value_id, vtn_value_type_pointer);
      val->pointer = vtn_pointer_from_ssa(b, ssa->def, type);
   } else {
      val = vtn_push_value(b, value_id, vtn_value_type_ssa);
      val->ssa = ssa;
   }
   return val;
}
static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}
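
/* Illustrative usage: require that the id in word 3 of an instruction
 * already holds a constant, then read it (this is exactly what
 * vtn_constant_value() below does):
 *
 *    nir_constant *c = vtn_value(b, w[3], vtn_value_type_constant)->constant;
 */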
struct vtn_type *
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

static inline nir_constant *
vtn_constant_value(struct vtn_builder *b, uint32_t value_id)
{
   return vtn_value(b, value_id, vtn_value_type_constant)->constant;
}
struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
                                unsigned index);
nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                        nir_ssa_def *index);
nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
                               nir_ssa_def *insert, unsigned index);
nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                       nir_ssa_def *insert, nir_ssa_def *index);

nir_deref_var *vtn_nir_deref(struct vtn_builder *b, uint32_t id);
struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
                                             struct vtn_variable *var,
                                             struct vtn_type *ptr_type);

nir_deref_var *vtn_pointer_to_deref(struct vtn_builder *b,
                                    struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out, unsigned *end_idx_out);

struct vtn_ssa_value *vtn_local_load(struct vtn_builder *b, nir_deref_var *src);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_var *dest);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);
typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);
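
/* Illustrative callback (hypothetical): count Location decorations on a
 * value.  `member` identifies the struct member being decorated, if any
 * (see VTN_DEC_STRUCT_MEMBER0 above):
 *
 *    static void
 *    count_locations(struct vtn_builder *b, struct vtn_value *val,
 *                    int member, const struct vtn_decoration *dec, void *data)
 *    {
 *       if (dec->decoration == SpvDecorationLocation)
 *          (*(unsigned *)data)++;
 *    }
 *
 *    unsigned n = 0;
 *    vtn_foreach_decoration(b, val, count_locations, &n);
 */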
typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);
nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size,
                                       unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);
struct vtn_builder *vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage,
                                       const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);

void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);
static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}
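
/* Worked example: vtn_align_u32(13, 8) == 16 and vtn_align_u32(16, 8) == 16.
 * The assert above requires `a` to be a nonzero power of two, which is what
 * makes the mask trick (v + a - 1) & ~(a - 1) valid.
 */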
static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}
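
/* Note: SPIR-V stores 64-bit literals low-order word first, so w[0] holds
 * the low 32 bits and w[1] the high 32 bits.  For example, the literal
 * 0x0000000100000002 arrives as w[0] == 0x2, w[1] == 0x1.
 */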
bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b,
                                           SpvOp ext_opcode,
                                           const uint32_t *words,
                                           unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b,
                                                      SpvOp ext_opcode,
                                                      const uint32_t *words,
                                                      unsigned count);

#endif /* _VTN_PRIVATE_H_ */