nir: Add options to nir_lower_compute_system_values to control compute ID base lowering
[mesa.git] / src / gallium / frontends / vallium / val_lower_vulkan_resource.c
1 /*
2 * Copyright © 2019 Red Hat.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24 #include "val_private.h"
25 #include "nir.h"
26 #include "nir_builder.h"
27 #include "val_lower_vulkan_resource.h"
28
29 static bool
30 lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
31 {
32 if (instr->type == nir_instr_type_intrinsic) {
33 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
34 if (intrin->intrinsic == nir_intrinsic_vulkan_resource_index)
35 return true;
36 }
37 if (instr->type == nir_instr_type_tex) {
38 return true;
39 }
40 return false;
41 }
42
43 static nir_ssa_def *lower_vri_intrin_vri(struct nir_builder *b,
44 nir_instr *instr, void *data_cb)
45 {
46 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
47 unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
48 unsigned binding_idx = nir_intrinsic_binding(intrin);
49 struct val_pipeline_layout *layout = data_cb;
50 struct val_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
51 int value = 0;
52 bool is_ubo = (binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
53 binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
54
55 for (unsigned s = 0; s < desc_set_idx; s++) {
56 if (is_ubo)
57 value += layout->set[s].layout->stage[b->shader->info.stage].const_buffer_count;
58 else
59 value += layout->set[s].layout->stage[b->shader->info.stage].shader_buffer_count;
60 }
61 if (is_ubo)
62 value += binding->stage[b->shader->info.stage].const_buffer_index + 1;
63 else
64 value += binding->stage[b->shader->info.stage].shader_buffer_index;
65 if (nir_src_is_const(intrin->src[0])) {
66 value += nir_src_comp_as_int(intrin->src[0], 0);
67 return nir_imm_int(b, value);
68 } else
69 return nir_iadd_imm(b, intrin->src[0].ssa, value);
70 }
71
/* Rewrite one deref source (sampler or texture) of @tex into a flat
 * per-stage index derived from the pipeline layout.
 *
 * The flat index is the sampler(-view) count of every descriptor set
 * preceding the variable's set, plus the binding's index within its own
 * set.  A constant array index on the deref is folded into the flat
 * index; a dynamic one is re-attached as a sampler/texture offset
 * source.  The result is stored into tex->sampler_index or
 * tex->texture_index as appropriate.
 *
 * Returns the flat index, or -1 if @tex has no source of
 * @deref_src_type.
 */
static int lower_vri_instr_tex_deref(nir_tex_instr *tex,
                                     nir_tex_src_type deref_src_type,
                                     gl_shader_stage stage,
                                     struct val_pipeline_layout *layout)
{
   int deref_src_idx = nir_tex_instr_src_index(tex, deref_src_type);

   if (deref_src_idx < 0)
      return -1;

   nir_deref_instr *deref_instr = nir_src_as_deref(tex->src[deref_src_idx].src);
   nir_variable *var = nir_deref_instr_get_variable(deref_instr);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   int value = 0;
   struct val_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   /* Detach the deref source from the tex instruction now; the deref
    * instruction itself remains valid and is still consulted below. */
   nir_tex_instr_remove_src(tex, deref_src_idx);
   /* Accumulate the counts of all descriptor sets before this one. */
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (deref_src_type == nir_tex_src_sampler_deref)
         value += layout->set[s].layout->stage[stage].sampler_count;
      else
         value += layout->set[s].layout->stage[stage].sampler_view_count;
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      value += binding->stage[stage].sampler_index;
   else
      value += binding->stage[stage].sampler_view_index;

   if (deref_instr->deref_type == nir_deref_type_array) {
      if (nir_src_is_const(deref_instr->arr.index))
         value += nir_src_as_uint(deref_instr->arr.index);
      else {
         /* Dynamic array index: can't be folded into the base, so pass
          * it along as an offset source instead. */
         if (deref_src_type == nir_tex_src_sampler_deref)
            nir_tex_instr_add_src(tex, nir_tex_src_sampler_offset, deref_instr->arr.index);
         else
            nir_tex_instr_add_src(tex, nir_tex_src_texture_offset, deref_instr->arr.index);
      }
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      tex->sampler_index = value;
   else
      tex->texture_index = value;
   return value;
}
116
117 static void lower_vri_instr_tex(struct nir_builder *b,
118 nir_tex_instr *tex, void *data_cb)
119 {
120 struct val_pipeline_layout *layout = data_cb;
121 int tex_value = 0;
122
123 lower_vri_instr_tex_deref(tex, nir_tex_src_sampler_deref, b->shader->info.stage, layout);
124 tex_value = lower_vri_instr_tex_deref(tex, nir_tex_src_texture_deref, b->shader->info.stage, layout);
125 if (tex_value >= 0)
126 b->shader->info.textures_used |= (1 << tex_value);
127 }
128
129 static nir_ssa_def *lower_vri_instr(struct nir_builder *b,
130 nir_instr *instr, void *data_cb)
131 {
132 if (instr->type == nir_instr_type_intrinsic) {
133 nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
134 if (intrin->intrinsic == nir_intrinsic_vulkan_resource_index)
135 return lower_vri_intrin_vri(b, instr, data_cb);
136 }
137 if (instr->type == nir_instr_type_tex)
138 lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
139 return NULL;
140 }
141
142 void val_lower_pipeline_layout(const struct val_device *device,
143 struct val_pipeline_layout *layout,
144 nir_shader *shader)
145 {
146 nir_shader_lower_instructions(shader, lower_vulkan_resource_index, lower_vri_instr, layout);
147 nir_foreach_uniform_variable(var, shader) {
148 const struct glsl_type *type = var->type;
149 enum glsl_base_type base_type =
150 glsl_get_base_type(glsl_without_array(type));
151 unsigned desc_set_idx = var->data.descriptor_set;
152 unsigned binding_idx = var->data.binding;
153 struct val_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
154 int value = 0;
155 var->data.descriptor_set = 0;
156 if (base_type == GLSL_TYPE_SAMPLER) {
157 if (binding->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
158 for (unsigned s = 0; s < desc_set_idx; s++)
159 value += layout->set[s].layout->stage[shader->info.stage].sampler_count;
160 value += binding->stage[shader->info.stage].sampler_index;
161 } else {
162 for (unsigned s = 0; s < desc_set_idx; s++)
163 value += layout->set[s].layout->stage[shader->info.stage].sampler_view_count;
164 value += binding->stage[shader->info.stage].sampler_view_index;
165 }
166 var->data.binding = value;
167 }
168 if (base_type == GLSL_TYPE_IMAGE) {
169 var->data.descriptor_set = 0;
170 for (unsigned s = 0; s < desc_set_idx; s++)
171 value += layout->set[s].layout->stage[shader->info.stage].image_count;
172 value += binding->stage[shader->info.stage].image_index;
173 var->data.binding = value;
174 }
175 }
176 }