/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 */
#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"

#include "spirv.h"
struct vtn_builder;
struct vtn_decoration;
void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)
void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)
/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp. This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder. This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
void _vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)
/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)
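/* Usage sketch (illustrative): an opcode handler validating its word count.
 * "OpFoo" and the specific check are hypothetical. On failure this logs
 * through the builder's debug callback and longjmps back to spirv_to_nir(),
 * so the caller must not be holding any non-ralloc resources at this point:
 *
 *    vtn_fail_if(count < 4, "OpFoo expects at least 4 words, got %u", count);
 */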
/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code. Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)
enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};
enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};
enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_switch,
};
struct vtn_cf_node {
   struct list_head link;
   enum vtn_cf_node_type type;
};
struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop. This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   SpvLoopControlMask control;
};
struct vtn_if {
   struct vtn_cf_node node;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   SpvSelectionControlMask control;
};
struct vtn_case {
   struct list_head link;

   struct list_head body;

   /* The block that starts this case */
   struct vtn_block *start_block;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};
struct vtn_switch {
   struct vtn_cf_node node;

   struct list_head cases;
};
struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};
struct vtn_function {
   struct exec_node node;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   SpvFunctionControlMask control;
};
typedef bool (*vtn_instruction_handler)(struct vtn_builder *, SpvOp,
                                        const uint32_t *, unsigned);
void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);
const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);
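/* Handler sketch (illustrative): callbacks follow the vtn_instruction_handler
 * signature above, and the walk stops early when a handler returns false.
 * "stop_at_function" and "word_end" are hypothetical names for this sketch.
 *
 *    static bool
 *    stop_at_function(struct vtn_builder *b, SpvOp opcode,
 *                     const uint32_t *w, unsigned count)
 *    {
 *       return opcode != SpvOpFunction;
 *    }
 *
 *    words = vtn_foreach_instruction(b, words, word_end, stop_at_function);
 */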
struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value. The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};
enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};
struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;
      };
      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type for dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         unsigned align;
      };
      /* Members for image types */
      struct {
         /* For images, indicates whether it's sampled or storage */
         bool sampled;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};
bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);
enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};
struct vtn_access_link {
   enum vtn_access_mode mode;
   uint32_t id;
};
struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array. This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /** Struct elements and array offsets.
    *
    * This is an array of 1 so that it can conveniently be created on the
    * stack but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
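/* Allocation sketch (illustrative): because link[] is declared with length 1,
 * a chain with n_links entries is allocated with extra trailing space and its
 * real length recorded in the length field, e.g. with ralloc:
 *
 *    struct vtn_access_chain *chain =
 *       ralloc_size(b, sizeof(*chain) +
 *                      (n_links - 1) * sizeof(chain->link[0]));
 *    chain->length = n_links;
 *
 * "n_links" is a hypothetical variable; the exact helper the implementation
 * uses may differ, this only shows the layout described above.
 */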
enum vtn_variable_mode {
   vtn_variable_mode_local,
   vtn_variable_mode_global,
   vtn_variable_mode_param,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_image,
   vtn_variable_mode_sampler,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
};
struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy. It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain.
    */
   struct vtn_variable *var;

   /** An access chain describing how to get from var to the referenced data
    *
    * This field may be NULL if the pointer references the entire variable or
    * if a (block_index, offset) pair is used instead of an access chain.
    */
   struct vtn_access_chain *chain;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;
};
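/* Representation sketch (illustrative): a UBO access such as block[i].member
 * typically shows up either as a (var, chain) pair,
 *
 *    ptr->var   = <vtn_variable for the UBO>
 *    ptr->chain = { i, <member index> }
 *
 * or, once lowered to explicit offsets, as a (block_index, offset) pair of
 * SSA values with var and chain left NULL. Which form is used depends on the
 * variable mode and on how far lowering has progressed.
 */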
struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   unsigned input_attachment_index;

   nir_variable *var;
   nir_variable **members;

   /*
    * In some early released versions of GLSLang, it implemented all function
    * calls by making copies of all parameters into temporary variables and
    * passing those variables into the function. It even did so for samplers
    * and images which violates the SPIR-V spec. Unfortunately, two games
    * (Talos Principle and Doom) shipped with this old version of GLSLang and
    * also happen to pass samplers into functions. Talos Principle received
    * an update fairly shortly after release with an updated GLSLang. Doom,
    * on the other hand, has never received an update so we need to work
    * around this GLSLang issue in SPIR-V -> NIR. Hopefully, we can drop this
    * hack at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;
};
struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
};
struct vtn_sampled_image {
   struct vtn_type *type;
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};
struct vtn_value {
   enum vtn_value_type value_type;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};
#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0
struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration. Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *literals;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};
struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   const struct spirv_to_nir_options *options;
   struct vtn_block *block;
   /* Current offset, file, line, and column. Useful for debugging. Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   const char *file;
   int line, col;
   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function. So while we parse each function, we
    * keep a hash table of constants we've resolved to SSA values so far, and
    * we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;
   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   bool origin_upper_left;
   bool pixel_center_integer;

   struct vtn_function *func;
   struct exec_list functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;
};
nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);
static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}
static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;
   return &b->values[value_id];
}
static inline struct vtn_value *
vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
             struct vtn_type *type, struct vtn_ssa_value *ssa)
{
   struct vtn_value *val;
   if (type->base_type == vtn_base_type_pointer) {
      val = vtn_push_value(b, value_id, vtn_value_type_pointer);
      val->pointer = vtn_pointer_from_ssa(b, ssa->def, type);
   } else {
      val = vtn_push_value(b, value_id, vtn_value_type_ssa);
      val->ssa = ssa;
   }
   return val;
}
static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}
bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);
static inline nir_constant *
vtn_constant_value(struct vtn_builder *b, uint32_t value_id)
{
   return vtn_value(b, value_id, vtn_value_type_constant)->constant;
}
struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);
nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
                                unsigned index);
nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                        nir_ssa_def *index);
nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
                               nir_ssa_def *insert, unsigned index);
nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                       nir_ssa_def *insert, nir_ssa_def *index);
nir_deref_var *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
                                             struct vtn_variable *var,
                                             struct vtn_type *ptr_type);

nir_deref_var *vtn_pointer_to_deref(struct vtn_builder *b,
                                    struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out, unsigned *end_idx_out);
struct vtn_ssa_value *vtn_local_load(struct vtn_builder *b, nir_deref_var *src);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_var *dest);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);
typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);
typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);
nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       unsigned src_bit_size, unsigned dst_bit_size);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

void vtn_handle_subgroup(struct vtn_builder *b, SpvOp opcode,
                         const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                    const uint32_t *words, unsigned count);
struct vtn_builder *vtn_create_builder(const uint32_t *words, size_t word_count,
                                       gl_shader_stage stage,
                                       const char *entry_point_name,
                                       const struct spirv_to_nir_options *options);
void vtn_handle_entry_point(struct vtn_builder *b, const uint32_t *w,
                            unsigned count);

void vtn_handle_decoration(struct vtn_builder *b, SpvOp opcode,
                           const uint32_t *w, unsigned count);
static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -((int32_t) a)));
   return (v + a - 1) & ~(a - 1);
}
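/* Example: vtn_align_u32(13, 8) == 16. The assert above requires "a" to be a
 * non-zero power of two; the mask trick only rounds correctly for such values.
 */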
static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}
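/* Example: for w[0] = 0x00000001 and w[1] = 0x00000002 this returns
 * 0x0000000200000001; per SPIR-V, the low-order word of a 64-bit literal
 * comes first in the instruction stream.
 */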
bool vtn_handle_amd_gcn_shader_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                           const uint32_t *words, unsigned count);

bool vtn_handle_amd_shader_trinary_minmax_instruction(struct vtn_builder *b, SpvOp ext_opcode,
                                                      const uint32_t *words, unsigned count);

#endif /* _VTN_PRIVATE_H_ */