+ vtn_assert(vtn_var->mode == vtn_variable_mode_ubo ||
+ vtn_var->mode == vtn_variable_mode_ssbo ||
+ vtn_var->mode == vtn_variable_mode_push_constant);
+ }
+ }
+}
+
+/* Convert a SPIR-V storage class into the vtn_variable_mode used
+ * internally by the SPIR-V-to-NIR pass.
+ *
+ * interface_type is the variable's interface type (may be NULL); it is
+ * only consulted for SpvStorageClassUniform, where the Block vs.
+ * BufferBlock decoration distinguishes UBOs from SSBOs.
+ *
+ * If nir_mode_out is non-NULL, the matching nir_variable_mode is also
+ * written through it.  Unhandled storage classes (including Generic)
+ * trigger vtn_fail().
+ */
+enum vtn_variable_mode
+vtn_storage_class_to_mode(struct vtn_builder *b,
+ SpvStorageClass class,
+ struct vtn_type *interface_type,
+ nir_variable_mode *nir_mode_out)
+{
+ enum vtn_variable_mode mode;
+ nir_variable_mode nir_mode;
+ switch (class) {
+ case SpvStorageClassUniform:
+ /* Assume it's an UBO if we lack the interface_type. */
+ if (!interface_type || interface_type->block) {
+ mode = vtn_variable_mode_ubo;
+ nir_mode = nir_var_mem_ubo;
+ } else if (interface_type->buffer_block) {
+ mode = vtn_variable_mode_ssbo;
+ nir_mode = nir_var_mem_ssbo;
+ } else {
+ /* Default-block uniforms, coming from gl_spirv */
+ mode = vtn_variable_mode_uniform;
+ nir_mode = nir_var_uniform;
+ }
+ break;
+ case SpvStorageClassStorageBuffer:
+ mode = vtn_variable_mode_ssbo;
+ nir_mode = nir_var_mem_ssbo;
+ break;
+ case SpvStorageClassPhysicalStorageBuffer:
+ /* Physical pointers come straight from the client, so they are
+ * addressed as global memory rather than via a binding.
+ */
+ mode = vtn_variable_mode_phys_ssbo;
+ nir_mode = nir_var_mem_global;
+ break;
+ case SpvStorageClassUniformConstant:
+ if (b->shader->info.stage == MESA_SHADER_KERNEL) {
+ /* For kernels, UniformConstant data is lowered either to global
+ * memory or to a UBO, depending on the constant_as_global option.
+ */
+ if (b->options->constant_as_global) {
+ mode = vtn_variable_mode_cross_workgroup;
+ nir_mode = nir_var_mem_global;
+ } else {
+ mode = vtn_variable_mode_ubo;
+ nir_mode = nir_var_mem_ubo;
+ }
+ } else {
+ mode = vtn_variable_mode_uniform;
+ nir_mode = nir_var_uniform;
+ }
+ break;
+ case SpvStorageClassPushConstant:
+ mode = vtn_variable_mode_push_constant;
+ nir_mode = nir_var_uniform;
+ break;
+ case SpvStorageClassInput:
+ mode = vtn_variable_mode_input;
+ nir_mode = nir_var_shader_in;
+ break;
+ case SpvStorageClassOutput:
+ mode = vtn_variable_mode_output;
+ nir_mode = nir_var_shader_out;
+ break;
+ case SpvStorageClassPrivate:
+ mode = vtn_variable_mode_private;
+ nir_mode = nir_var_shader_temp;
+ break;
+ case SpvStorageClassFunction:
+ mode = vtn_variable_mode_function;
+ nir_mode = nir_var_function_temp;
+ break;
+ case SpvStorageClassWorkgroup:
+ mode = vtn_variable_mode_workgroup;
+ nir_mode = nir_var_mem_shared;
+ break;
+ case SpvStorageClassAtomicCounter:
+ /* Atomic counters are treated as plain uniforms at the NIR level. */
+ mode = vtn_variable_mode_uniform;
+ nir_mode = nir_var_uniform;
+ break;
+ case SpvStorageClassCrossWorkgroup:
+ mode = vtn_variable_mode_cross_workgroup;
+ nir_mode = nir_var_mem_global;
+ break;
+ case SpvStorageClassImage:
+ /* NOTE(review): Image maps to nir_var_mem_ubo here — confirm this is
+ * the intended NIR mode for image memory.
+ */
+ mode = vtn_variable_mode_image;
+ nir_mode = nir_var_mem_ubo;
+ break;
+ case SpvStorageClassGeneric:
+ default:
+ vtn_fail("Unhandled variable storage class: %s (%u)",
+ spirv_storageclass_to_string(class), class);
+ }
+
+ if (nir_mode_out)
+ *nir_mode_out = nir_mode;
+
+ return mode;
+}
+
+/* Return the nir_address_format used for pointers of the given mode.
+ *
+ * Buffer-like modes (UBO, SSBO, physical SSBO, push constant, workgroup,
+ * and global/cross-workgroup memory) use the format configured in the
+ * spirv_to_nir options.  Function-temporary pointers use the configured
+ * temp format only when physical pointers are in use; all remaining
+ * modes are strictly logical (derefs only, no integer addresses).
+ */
+nir_address_format
+vtn_mode_to_address_format(struct vtn_builder *b, enum vtn_variable_mode mode)
+{
+ switch (mode) {
+ case vtn_variable_mode_ubo:
+ return b->options->ubo_addr_format;
+
+ case vtn_variable_mode_ssbo:
+ return b->options->ssbo_addr_format;
+
+ case vtn_variable_mode_phys_ssbo:
+ return b->options->phys_ssbo_addr_format;
+
+ case vtn_variable_mode_push_constant:
+ return b->options->push_const_addr_format;
+
+ case vtn_variable_mode_workgroup:
+ return b->options->shared_addr_format;
+
+ case vtn_variable_mode_cross_workgroup:
+ return b->options->global_addr_format;
+
+ case vtn_variable_mode_function:
+ if (b->physical_ptrs)
+ return b->options->temp_addr_format;
+ /* Fall through. */
+
+ case vtn_variable_mode_private:
+ case vtn_variable_mode_uniform:
+ case vtn_variable_mode_input:
+ case vtn_variable_mode_output:
+ case vtn_variable_mode_image:
+ return nir_address_format_logical;
+ }
+
+ unreachable("Invalid variable mode");
+}
+
+/* Lower a vtn_pointer to a nir_ssa_def so it can be carried in a SPIR-V
+ * SSA value.
+ *
+ * For offset-based (SSA-offset) pointers the result is either a vec2 of
+ * (block_index, offset) for UBO/SSBO pointers, or the scalar offset for
+ * workgroup pointers.  For deref-based pointers the result is either the
+ * block index (when the pointer refers to a whole external block) or the
+ * SSA destination of the NIR deref.
+ */
+nir_ssa_def *
+vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr)
+{
+ if (vtn_pointer_uses_ssa_offset(b, ptr)) {
+ /* This pointer needs to have a pointer type with actual storage */
+ vtn_assert(ptr->ptr_type);
+ vtn_assert(ptr->ptr_type->type);
+
+ if (!ptr->offset) {
+ /* If we don't have an offset then we must be a pointer to the variable
+ * itself.  Dereference through an empty access chain to materialize
+ * the offset (and block index, if any).
+ */
+ vtn_assert(!ptr->offset && !ptr->block_index);
+
+ struct vtn_access_chain chain = {
+ .length = 0,
+ };
+ ptr = vtn_ssa_offset_pointer_dereference(b, ptr, &chain);
+ }
+
+ vtn_assert(ptr->offset);
+ if (ptr->block_index) {
+ vtn_assert(ptr->mode == vtn_variable_mode_ubo ||
+ ptr->mode == vtn_variable_mode_ssbo);
+ return nir_vec2(&b->nb, ptr->block_index, ptr->offset);
+ } else {
+ vtn_assert(ptr->mode == vtn_variable_mode_workgroup);
+ return ptr->offset;
+ }
+ } else {
+ if (vtn_pointer_is_external_block(b, ptr) &&
+ vtn_type_contains_block(b, ptr->type) &&
+ ptr->mode != vtn_variable_mode_phys_ssbo) {
+ /* In this case, we're looking for a block index and not an actual
+ * deref.
+ *
+ * For PhysicalStorageBuffer pointers, we don't have a block index
+ * at all because we get the pointer directly from the client. This
+ * assumes that there will never be a SSBO binding variable using the
+ * PhysicalStorageBuffer storage class. This assumption appears to
+ * be correct according to the Vulkan spec: per the table "Shader
+ * Resource and Storage Class Correspondence," only the Uniform
+ * storage class with BufferBlock or the StorageBuffer storage class
+ * with Block can be used for SSBO bindings.
+ */
+ if (!ptr->block_index) {
+ /* If we don't have a block_index then we must be a pointer to the
+ * variable itself.
+ */
+ vtn_assert(!ptr->deref);
+
+ struct vtn_access_chain chain = {
+ .length = 0,
+ };
+ ptr = vtn_nir_deref_pointer_dereference(b, ptr, &chain);
+ }
+
+ return ptr->block_index;
+ } else {
+ return &vtn_pointer_to_deref(b, ptr)->dest.ssa;
+ }
+ }
+}
+
+/* Reconstruct a vtn_pointer from an SSA value — the inverse of
+ * vtn_pointer_to_ssa.  The SSA value is either a (block_index, offset)
+ * vec2, a scalar offset, a block index, or a physical address/deref,
+ * depending on ptr_type's storage class and the address format in use.
+ */
+struct vtn_pointer *
+vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
+ struct vtn_type *ptr_type)
+{
+ vtn_assert(ptr_type->base_type == vtn_base_type_pointer);
+
+ struct vtn_pointer *ptr = rzalloc(b, struct vtn_pointer);
+ struct vtn_type *without_array =
+ vtn_type_without_array(ptr_type->deref);
+
+ nir_variable_mode nir_mode;
+ ptr->mode = vtn_storage_class_to_mode(b, ptr_type->storage_class,
+ without_array, &nir_mode);
+ ptr->type = ptr_type->deref;
+ ptr->ptr_type = ptr_type;
+
+ if (b->wa_glslang_179) {
+ /* To work around https://github.com/KhronosGroup/glslang/issues/179 we
+ * need to whack the mode because it creates a function parameter with
+ * the Function storage class even though it's a pointer to a sampler.
+ * If we don't do this, then NIR won't get rid of the deref_cast for us.
+ */
+ if (ptr->mode == vtn_variable_mode_function &&
+ (ptr->type->base_type == vtn_base_type_sampler ||
+ ptr->type->base_type == vtn_base_type_sampled_image)) {
+ ptr->mode = vtn_variable_mode_uniform;
+ nir_mode = nir_var_uniform;
+ }
+ }
+
+ if (vtn_pointer_uses_ssa_offset(b, ptr)) {
+ /* This pointer type needs to have actual storage */
+ vtn_assert(ptr_type->type);
+ if (ptr->mode == vtn_variable_mode_ubo ||
+ ptr->mode == vtn_variable_mode_ssbo) {
+ /* UBO/SSBO pointers are packed as (block_index, offset). */
+ vtn_assert(ssa->num_components == 2);
+ ptr->block_index = nir_channel(&b->nb, ssa, 0);
+ ptr->offset = nir_channel(&b->nb, ssa, 1);
+ } else {
+ vtn_assert(ssa->num_components == 1);
+ ptr->block_index = NULL;
+ ptr->offset = ssa;
+ }
+ } else {
+ const struct glsl_type *deref_type = ptr_type->deref->type;
+ if (!vtn_pointer_is_external_block(b, ptr)) {
+ ptr->deref = nir_build_deref_cast(&b->nb, ssa, nir_mode,
+ deref_type, ptr_type->stride);
+ } else if (vtn_type_contains_block(b, ptr->type) &&
+ ptr->mode != vtn_variable_mode_phys_ssbo) {
+ /* This is a pointer to somewhere in an array of blocks, not a
+ * pointer to somewhere inside the block. Set the block index
+ * instead of making a cast.
+ */
+ ptr->block_index = ssa;
+ } else {
+ /* This is a pointer to something internal or a pointer inside a
+ * block. It's just a regular cast.
+ *
+ * For PhysicalStorageBuffer pointers, we don't have a block index
+ * at all because we get the pointer directly from the client. This
+ * assumes that there will never be a SSBO binding variable using the
+ * PhysicalStorageBuffer storage class. This assumption appears to
+ * be correct according to the Vulkan spec: per the table "Shader
+ * Resource and Storage Class Correspondence," only the Uniform
+ * storage class with BufferBlock or the StorageBuffer storage class
+ * with Block can be used for SSBO bindings.
+ */
+ ptr->deref = nir_build_deref_cast(&b->nb, ssa, nir_mode,
+ ptr_type->deref->type,
+ ptr_type->stride);
+ ptr->deref->dest.ssa.num_components =
+ glsl_get_vector_elements(ptr_type->type);
+ ptr->deref->dest.ssa.bit_size = glsl_get_bit_size(ptr_type->type);