turnip: lower samplers and uniform buffer indices
author: Jonathan Marek <jonathan@marek.ca>
Thu, 26 Sep 2019 04:29:26 +0000 (00:29 -0400)
committer: Jonathan Marek <jonathan@marek.ca>
Thu, 26 Sep 2019 21:18:13 +0000 (17:18 -0400)
Lower these to something compatible with ir3, and save the descriptor set
and binding information.

Signed-off-by: Jonathan Marek <jonathan@marek.ca>
Reviewed-by: Kristian H. Kristensen <hoegsberg@google.com>
Acked-by: Eric Anholt <eric@anholt.net>
src/freedreno/vulkan/tu_private.h
src/freedreno/vulkan/tu_shader.c

index 170be1a149e56509b8ebf109741df4fc9aa3f1d4..c93d7fa6357707d671b98b4064255f136548d407 100644 (file)
@@ -1023,10 +1023,21 @@ struct tu_shader_compile_options
    bool include_binning_pass;
 };
 
+/* Maps a flat hardware slot index (texture/sampler/UBO) back to the
+ * Vulkan (descriptor set, binding) pair it was lowered from, so the
+ * driver can later resolve the actual descriptor.  Capacity is fixed
+ * at 32 entries; map_add() asserts when the map is full.
+ */
+struct tu_descriptor_map
+{
+   /* number of valid entries in set[]/binding[] */
+   unsigned num;
+   int set[32];
+   int binding[32];
+};
+
 struct tu_shader
 {
    struct ir3_shader ir3_shader;
 
+   struct tu_descriptor_map texture_map;
+   struct tu_descriptor_map sampler_map;
+   struct tu_descriptor_map ubo_map;
+
    /* This may be true for vertex shaders.  When true, variants[1] is the
     * binning variant and binning_binary is non-NULL.
     */
index f3d81675bb17e692f4407169ed13bb05efc9fb6e..37edf82642baeaa9fb1e87d3d96a99acd1c3b2b2 100644 (file)
@@ -107,6 +107,139 @@ tu_sort_variables_by_location(struct exec_list *variables)
    exec_list_move_nodes_to(&sorted, variables);
 }
 
+/* Return the flat index for (set, binding), appending a new entry if
+ * the pair is not already present.  Lookup is a linear scan, which is
+ * fine for the small fixed capacity.  NOTE(review): the capacity check
+ * is only an assert -- in a release build a 33rd distinct pair would
+ * write past the arrays.
+ */
+static unsigned
+map_add(struct tu_descriptor_map *map, int set, int binding)
+{
+   unsigned index;
+   for (index = 0; index < map->num; index++) {
+      if (set == map->set[index] && binding == map->binding[index])
+         break;
+   }
+
+   assert(index < ARRAY_SIZE(map->set));
+
+   /* Writing an existing entry back is harmless; num only grows for
+    * newly appended pairs. */
+   map->set[index] = set;
+   map->binding[index] = binding;
+   map->num = MAX2(map->num, index + 1);
+   return index;
+}
+
+/* Replace the deref source at src_idx of a tex instruction with a flat
+ * texture or sampler index recorded in the shader's descriptor map,
+ * then drop the deref source -- ir3 consumes plain indices, not derefs.
+ */
+static void
+lower_tex_src_to_offset(nir_tex_instr *instr, unsigned src_idx,
+                        struct tu_shader *shader, bool is_sampler)
+{
+   nir_deref_instr *deref =
+      nir_instr_as_deref(instr->src[src_idx].src.ssa->parent_instr);
+
+   /* Only direct variable derefs are handled so far; arrays of
+    * samplers are left unlowered (deref source stays in place). */
+   if (deref->deref_type != nir_deref_type_var) {
+      tu_finishme("sampler array");
+      return;
+   }
+
+   if (is_sampler) {
+      instr->sampler_index = map_add(&shader->sampler_map,
+                                     deref->var->data.descriptor_set,
+                                     deref->var->data.binding);
+   } else {
+      instr->texture_index = map_add(&shader->texture_map,
+                                     deref->var->data.descriptor_set,
+                                     deref->var->data.binding);
+      /* non-array texture: a single element */
+      instr->texture_array_size = 1;
+   }
+
+   nir_tex_instr_remove_src(instr, src_idx);
+}
+
+/* Lower the texture and/or sampler deref sources of a tex instruction
+ * to flat indices.  Returns true (progress) if at least one of the two
+ * deref sources was present.
+ */
+static bool
+lower_sampler(nir_tex_instr *instr, struct tu_shader *shader)
+{
+   int texture_idx =
+      nir_tex_instr_src_index(instr, nir_tex_src_texture_deref);
+
+   if (texture_idx >= 0)
+      lower_tex_src_to_offset(instr, texture_idx, shader, false);
+
+   int sampler_idx =
+      nir_tex_instr_src_index(instr, nir_tex_src_sampler_deref);
+
+   if (sampler_idx >= 0)
+      lower_tex_src_to_offset(instr, sampler_idx, shader, true);
+
+   /* No deref sources at all -> nothing was changed. */
+   if (texture_idx < 0 && sampler_idx < 0)
+      return false;
+
+   return true;
+}
+
+/* Lower vulkan_resource_index intrinsics for uniform buffers to an
+ * immediate ir3 UBO index, recording the (set, binding) pair in the
+ * shader's ubo_map.  Returns true if the intrinsic was replaced.
+ * Non-constant/non-zero array indices and non-UBO descriptor types are
+ * not handled yet and are left in place.
+ */
+static bool
+lower_intrinsic(nir_builder *b, nir_intrinsic_instr *instr,
+                struct tu_shader *shader)
+{
+   if (instr->intrinsic != nir_intrinsic_vulkan_resource_index)
+      return false;
+
+   nir_const_value *const_val = nir_src_as_const_value(instr->src[0]);
+   if (!const_val || const_val->u32 != 0) {
+      tu_finishme("non-zero vulkan_resource_index array index");
+      return false;
+   }
+
+   if (nir_intrinsic_desc_type(instr) != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) {
+      tu_finishme("non-ubo vulkan_resource_index");
+      return false;
+   }
+
+   unsigned index = map_add(&shader->ubo_map,
+                            nir_intrinsic_desc_set(instr),
+                            nir_intrinsic_binding(instr));
+
+   b->cursor = nir_before_instr(&instr->instr);
+   /* skip index 0 because ir3 treats it differently */
+   nir_ssa_def_rewrite_uses(&instr->dest.ssa,
+                            nir_src_for_ssa(nir_imm_int(b, index + 1)));
+   nir_instr_remove(&instr->instr);
+
+   return true;
+}
+
+/* Walk every instruction of a function implementation and dispatch to
+ * the tex / intrinsic lowering helpers.  Returns true if anything was
+ * changed.  The _safe iterator is required because lower_intrinsic
+ * removes instructions while iterating.
+ */
+static bool
+lower_impl(nir_function_impl *impl, struct tu_shader *shader)
+{
+   nir_builder b;
+   nir_builder_init(&b, impl);
+   bool progress = false;
+
+   nir_foreach_block(block, impl) {
+      nir_foreach_instr_safe(instr, block) {
+         switch (instr->type) {
+         case nir_instr_type_tex:
+            progress |= lower_sampler(nir_instr_as_tex(instr), shader);
+            break;
+         case nir_instr_type_intrinsic:
+            progress |= lower_intrinsic(&b, nir_instr_as_intrinsic(instr), shader);
+            break;
+         default:
+            break;
+         }
+      }
+   }
+
+   return progress;
+}
+
+/* NIR pass entry point: lower sampler/texture derefs and UBO resource
+ * indices across all functions of the shader, filling tu_shader's
+ * descriptor maps.  Returns true if any function made progress.
+ */
+static bool
+tu_lower_io(nir_shader *shader, struct tu_shader *tu_shader)
+{
+   bool progress = false;
+
+   nir_foreach_function(function, shader) {
+      /* declarations without an implementation have nothing to lower */
+      if (function->impl)
+         progress |= lower_impl(function->impl, tu_shader);
+   }
+
+   return progress;
+}
+
 struct tu_shader *
 tu_shader_create(struct tu_device *dev,
                  gl_shader_stage stage,
@@ -171,6 +304,9 @@ tu_shader_create(struct tu_device *dev,
 
    NIR_PASS_V(nir, nir_lower_system_values);
    NIR_PASS_V(nir, nir_lower_frexp);
+
+   NIR_PASS_V(nir, tu_lower_io, shader);
+
    NIR_PASS_V(nir, nir_lower_io, nir_var_all, ir3_glsl_type_size, 0);
 
    nir_shader_gather_info(nir, nir_shader_get_entrypoint(nir));