+ nir_ssa_def *desc_offset, *array_index;
+ switch (state->ssbo_addr_format) {
+ case nir_address_format_64bit_bounded_global:
+ /* See also lower_res_index_intrinsic() */
+ desc_offset = nir_channel(b, index, 0);
+ array_index = nir_umin(b, nir_channel(b, index, 1),
+ nir_channel(b, index, 2));
+ break;
+
+ case nir_address_format_64bit_global:
+ /* See also lower_res_index_intrinsic() */
+ desc_offset = nir_unpack_64_2x32_split_x(b, index);
+ array_index = nir_unpack_64_2x32_split_y(b, index);
+ break;
+
+ default:
+ unreachable("Unhandled address format for SSBO");
+ }
+
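+ /* The dynamic offset base for this binding lives in the low byte of the
+ * descriptor offset; adding the array index gives the slot of this
+ * particular element in the dynamic offset array.
+ */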
+ nir_ssa_def *dyn_offset_base =
+ nir_extract_u8(b, desc_offset, nir_imm_int(b, 0));
+ nir_ssa_def *dyn_offset_idx =
+ nir_iadd(b, dyn_offset_base, array_index);
+ if (state->add_bounds_checks) {
+ dyn_offset_idx = nir_umin(b, dyn_offset_idx,
+ nir_imm_int(b, MAX_DYNAMIC_BUFFERS - 1));
+ }
+
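+ /* Fetch the 32-bit dynamic offset for this slot from the
+ * dynamic_offsets array in the push constants.
+ */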
+ nir_intrinsic_instr *dyn_load =
+ nir_intrinsic_instr_create(b->shader,
+ nir_intrinsic_load_push_constant);
+ nir_intrinsic_set_base(dyn_load, offsetof(struct anv_push_constants,
+ dynamic_offsets));
+ nir_intrinsic_set_range(dyn_load, MAX_DYNAMIC_BUFFERS * 4);
+ dyn_load->src[0] = nir_src_for_ssa(nir_imul_imm(b, dyn_offset_idx, 4));
+ dyn_load->num_components = 1;
+ nir_ssa_dest_init(&dyn_load->instr, &dyn_load->dest, 1, 32, NULL);
+ nir_builder_instr_insert(b, &dyn_load->instr);
+
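+ /* A dynamic offset base of 0xff is the "no dynamic offset" sentinel, so
+ * use zero instead of the loaded value in that case.
+ */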
+ nir_ssa_def *dynamic_offset =
+ nir_bcsel(b, nir_ieq(b, dyn_offset_base, nir_imm_int(b, 0xff)),
+ nir_imm_int(b, 0), &dyn_load->dest.ssa);
+
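+ /* Fold the dynamic offset into the 64-bit base address; for the bounded
+ * format the size and remaining descriptor data stay untouched.
+ */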
+ switch (state->ssbo_addr_format) {
+ case nir_address_format_64bit_bounded_global: {
+ /* The dynamic offset gets added to the base pointer so that we
+ * have a sliding window range.
+ */
+ nir_ssa_def *base_ptr =
+ nir_pack_64_2x32(b, nir_channels(b, desc, 0x3));
+ base_ptr = nir_iadd(b, base_ptr, nir_u2u64(b, dynamic_offset));
+ desc = nir_vec4(b, nir_unpack_64_2x32_split_x(b, base_ptr),
+ nir_unpack_64_2x32_split_y(b, base_ptr),
+ nir_channel(b, desc, 2),
+ nir_channel(b, desc, 3));
+ break;
+ }
+
+ case nir_address_format_64bit_global:
+ desc = nir_iadd(b, desc, nir_u2u64(b, dynamic_offset));
+ break;
+
+ default:
+ unreachable("Unhandled address format for SSBO");
+ }
+ }
+ } else {
+ /* We follow the nir_address_format_32bit_index_offset model */
+ desc = index;
+ }
+
+ assert(intrin->dest.is_ssa);
+ nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_src_for_ssa(desc));
+ nir_instr_remove(&intrin->instr);
+}
+
+static void
+lower_get_buffer_size(nir_intrinsic_instr *intrin,
+ struct apply_pipeline_layout_state *state)
+{
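+ /* Skip instructions that have already been lowered by this pass. */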
+ if (_mesa_set_search(state->lowered_instrs, intrin))
+ return;
+
+ nir_builder *b = &state->builder;
+
+ b->cursor = nir_before_instr(&intrin->instr);
+
+ const VkDescriptorType desc_type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+
+ assert(intrin->src[0].is_ssa);
+ nir_ssa_def *index = intrin->src[0].ssa;
+
+ if (state->pdevice->has_a64_buffer_access) {
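+ /* With A64 buffer access, the SSBO descriptor itself carries the buffer
+ * size in its third component, so load the descriptor and extract it.
+ */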
+ nir_ssa_def *desc = build_ssbo_descriptor_load(desc_type, index, state);
+ nir_ssa_def *size = nir_channel(b, desc, 2);
+ nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_src_for_ssa(size));
+ nir_instr_remove(&intrin->instr);
+ } else {
+ /* We're following the nir_address_format_32bit_index_offset model so
+ * the binding table index is the first component of the address. The
+ * back-end wants a scalar binding table index source.
+ */
+ nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
+ nir_src_for_ssa(nir_channel(b, index, 0)));
+ }
+}
+
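+/* Build a load of num_components components of bit_size bits of descriptor
+ * data for the binding referenced by deref, starting offset bytes into that
+ * binding's descriptor.
+ */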
+static nir_ssa_def *
+build_descriptor_load(nir_deref_instr *deref, unsigned offset,
+ unsigned num_components, unsigned bit_size,
+ struct apply_pipeline_layout_state *state)
+{
+ nir_variable *var = nir_deref_instr_get_variable(deref);
+
+ unsigned set = var->data.descriptor_set;
+ unsigned binding = var->data.binding;
+ unsigned array_size =
+ state->layout->set[set].layout->binding[binding].array_size;
+
+ const struct anv_descriptor_set_binding_layout *bind_layout =
+ &state->layout->set[set].layout->binding[binding];
+
+ nir_builder *b = &state->builder;
+
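+ /* The first source of the load_ubo below selects this set's descriptor
+ * buffer (state->set[set].desc_offset); the second is the byte offset of
+ * the descriptor within it.
+ */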
+ nir_ssa_def *desc_buffer_index =
+ nir_imm_int(b, state->set[set].desc_offset);
+
+ nir_ssa_def *desc_offset =
+ nir_imm_int(b, bind_layout->descriptor_offset + offset);
+ if (deref->deref_type != nir_deref_type_var) {
+ assert(deref->deref_type == nir_deref_type_array);
+
+ const unsigned descriptor_size = anv_descriptor_size(bind_layout);
+ nir_ssa_def *arr_index = nir_ssa_for_src(b, deref->arr.index, 1);
+ if (state->add_bounds_checks)
+ arr_index = nir_umin(b, arr_index, nir_imm_int(b, array_size - 1));
+
+ desc_offset = nir_iadd(b, desc_offset,
+ nir_imul_imm(b, arr_index, descriptor_size));
+ }
+
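+ /* Record what we know about the alignment of the descriptor load:
+ * 8-byte aligned apart from whatever the compile-time offset contributes.
+ */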
+ nir_intrinsic_instr *desc_load =
+ nir_intrinsic_instr_create(b->shader, nir_intrinsic_load_ubo);
+ desc_load->src[0] = nir_src_for_ssa(desc_buffer_index);
+ desc_load->src[1] = nir_src_for_ssa(desc_offset);
+ nir_intrinsic_set_align(desc_load, 8, offset % 8);
+ desc_load->num_components = num_components;
+ nir_ssa_dest_init(&desc_load->instr, &desc_load->dest,
+ num_components, bit_size, NULL);
+ nir_builder_instr_insert(b, &desc_load->instr);
+
+ return &desc_load->dest.ssa;
+}
+
+static void
+lower_image_intrinsic(nir_intrinsic_instr *intrin,
+ struct apply_pipeline_layout_state *state)
+{
+ nir_deref_instr *deref = nir_src_as_deref(intrin->src[0]);
+ nir_variable *var = nir_deref_instr_get_variable(deref);
+
+ unsigned set = var->data.descriptor_set;
+ unsigned binding = var->data.binding;
+ unsigned binding_offset = state->set[set].surface_offsets[binding];
+
+ nir_builder *b = &state->builder;
+ b->cursor = nir_before_instr(&intrin->instr);
+
+ ASSERTED const bool use_bindless = state->pdevice->has_bindless_images;
+
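+ /* Image param queries are answered from the image params stored in the
+ * descriptor data; each param occupies 16 bytes, hence param * 16 below.
+ */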
+ if (intrin->intrinsic == nir_intrinsic_image_deref_load_param_intel) {
+ b->cursor = nir_instr_remove(&intrin->instr);
+
+ assert(!use_bindless); /* Otherwise our offsets would be wrong */
+ const unsigned param = nir_intrinsic_base(intrin);
+
+ nir_ssa_def *desc =
+ build_descriptor_load(deref, param * 16,
+ intrin->dest.ssa.num_components,
+ intrin->dest.ssa.bit_size, state);
+
+ nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_src_for_ssa(desc));
+ } else if (binding_offset > MAX_BINDING_TABLE_SIZE) {
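+ /* Binding table offsets above MAX_BINDING_TABLE_SIZE indicate a bindless
+ * image: load the pair of bindless handles from the descriptor and pick
+ * the write-only one when the image is never read.
+ */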
+ const bool write_only =
+ (var->data.access & ACCESS_NON_READABLE) != 0;
+ nir_ssa_def *desc =
+ build_descriptor_load(deref, 0, 2, 32, state);
+ nir_ssa_def *handle = nir_channel(b, desc, write_only ? 1 : 0);
+ nir_rewrite_image_intrinsic(intrin, handle, true);
+ } else {
+ unsigned array_size =
+ state->layout->set[set].layout->binding[binding].array_size;
+
+ nir_ssa_def *index = NULL;
+ if (deref->deref_type != nir_deref_type_var) {
+ assert(deref->deref_type == nir_deref_type_array);
+ index = nir_ssa_for_src(b, deref->arr.index, 1);
+ if (state->add_bounds_checks)
+ index = nir_umin(b, index, nir_imm_int(b, array_size - 1));
+ } else {
+ index = nir_imm_int(b, 0);