#include "nir_deref.h"
#include "util/hash_table.h"
+/* Returns true if this cast deref changes nothing relative to its parent
+ * deref: same variable mode, same glsl_type, and an SSA destination with
+ * identical shape (component count and bit size).  Such casts can safely be
+ * skipped when walking a deref chain.
+ */
+static bool
+is_trivial_deref_cast(nir_deref_instr *cast)
+{
+ nir_deref_instr *parent = nir_src_as_deref(cast->parent);
+ /* The parent source of a cast may not be a deref at all (e.g. a raw
+  * pointer SSA value); in that case we cannot prove the cast trivial.
+  */
+ if (!parent)
+ return false;
+
+ return cast->mode == parent->mode &&
+ cast->type == parent->type &&
+ cast->dest.ssa.num_components == parent->dest.ssa.num_components &&
+ cast->dest.ssa.bit_size == parent->dest.ssa.bit_size;
+}
+
void
nir_deref_path_init(nir_deref_path *path,
nir_deref_instr *deref, void *mem_ctx)
*tail = NULL;
for (nir_deref_instr *d = deref; d; d = nir_deref_instr_parent(d)) {
+ if (d->deref_type == nir_deref_type_cast && is_trivial_deref_cast(d))
+ continue;
count++;
if (count <= max_short_path_len)
*(--head) = d;
path->path = ralloc_array(mem_ctx, nir_deref_instr *, count + 1);
head = tail = path->path + count;
*tail = NULL;
- for (nir_deref_instr *d = deref; d; d = nir_deref_instr_parent(d))
+ for (nir_deref_instr *d = deref; d; d = nir_deref_instr_parent(d)) {
+ if (d->deref_type == nir_deref_type_cast && is_trivial_deref_cast(d))
+ continue;
*(--head) = d;
+ }
done:
assert(head == path->path);
nir_deref_path path;
nir_deref_path_init(&path, deref, NULL);
- assert(path.path[0]->deref_type == nir_deref_type_var);
-
unsigned offset = 0;
for (nir_deref_instr **p = &path.path[1]; *p; p++) {
- if ((*p)->deref_type == nir_deref_type_array) {
+ switch ((*p)->deref_type) {
+ case nir_deref_type_array:
offset += nir_src_as_uint((*p)->arr.index) *
type_get_array_stride((*p)->type, size_align);
- } else if ((*p)->deref_type == nir_deref_type_struct) {
+ break;
+ case nir_deref_type_struct: {
/* p starts at path[1], so this is safe */
nir_deref_instr *parent = *(p - 1);
offset += struct_type_get_field_offset(parent->type, size_align,
(*p)->strct.index);
- } else {
+ break;
+ }
+ case nir_deref_type_cast:
+ /* A cast doesn't contribute to the offset */
+ break;
+ default:
unreachable("Unsupported deref type");
}
}
nir_deref_path path;
nir_deref_path_init(&path, deref, NULL);
- assert(path.path[0]->deref_type == nir_deref_type_var);
-
- nir_ssa_def *offset = nir_imm_int(b, 0);
+ nir_ssa_def *offset = nir_imm_intN_t(b, 0, deref->dest.ssa.bit_size);
for (nir_deref_instr **p = &path.path[1]; *p; p++) {
- if ((*p)->deref_type == nir_deref_type_array) {
+ switch ((*p)->deref_type) {
+ case nir_deref_type_array: {
nir_ssa_def *index = nir_ssa_for_src(b, (*p)->arr.index, 1);
int stride = type_get_array_stride((*p)->type, size_align);
offset = nir_iadd(b, offset, nir_amul_imm(b, index, stride));
- } else if ((*p)->deref_type == nir_deref_type_struct) {
+ break;
+ }
+ case nir_deref_type_struct: {
/* p starts at path[1], so this is safe */
nir_deref_instr *parent = *(p - 1);
unsigned field_offset =
struct_type_get_field_offset(parent->type, size_align,
(*p)->strct.index);
offset = nir_iadd_imm(b, offset, field_offset);
- } else {
+ break;
+ }
+ case nir_deref_type_cast:
+ /* A cast doesn't contribute to the offset */
+ break;
+ default:
unreachable("Unsupported deref type");
}
}
{
assert(path->path[0]->deref_type == nir_deref_type_var);
- if (path->path[0]->var->data.image.access & ACCESS_COHERENT)
+ if (path->path[0]->var->data.access & ACCESS_COHERENT)
return true;
for (nir_deref_instr **p = &path->path[1]; *p; p++) {
}
/* We're at either the tail or the divergence point between the two deref
- * paths. Look to see if either contains a ptr_as_array deref. It it
- * does we don't know how to safely make any inferences. Hopefully,
+ * paths. Look to see if either contains a cast or a ptr_as_array deref. If
+ * it does we don't know how to safely make any inferences. Hopefully,
* nir_opt_deref will clean most of these up and we can start inferring
* things again.
*
* different constant indices.
*/
for (nir_deref_instr **t_p = a_p; *t_p; t_p++) {
- if ((*t_p)->deref_type == nir_deref_type_ptr_as_array)
+ if ((*t_p)->deref_type == nir_deref_type_cast ||
+ (*t_p)->deref_type == nir_deref_type_ptr_as_array)
return nir_derefs_may_alias_bit;
}
for (nir_deref_instr **t_p = b_p; *t_p; t_p++) {
- if ((*t_p)->deref_type == nir_deref_type_ptr_as_array)
+ if ((*t_p)->deref_type == nir_deref_type_cast ||
+ (*t_p)->deref_type == nir_deref_type_ptr_as_array)
return nir_derefs_may_alias_bit;
}
switch (deref->deref_type) {
case nir_deref_type_var:
case nir_deref_type_array_wildcard:
- case nir_deref_type_cast:
/* Nothing more to do */
break;
+ case nir_deref_type_cast:
+ new_deref->cast.ptr_stride = deref->cast.ptr_stride;
+ break;
+
case nir_deref_type_array:
+ case nir_deref_type_ptr_as_array:
assert(!nir_src_as_deref(deref->arr.index));
nir_src_copy(&new_deref->arr.index, &deref->arr.index, new_deref);
break;
return state.progress;
}
-static bool
-is_trivial_deref_cast(nir_deref_instr *cast)
+/* Recursively re-derives the glsl_type of every deref instruction that
+ * descends from parent, after parent->type has been changed.  The recursion
+ * deliberately stops at casts, since a cast carries its own type rather than
+ * deriving it from its parent.
+ */
+static void
+nir_deref_instr_fixup_child_types(nir_deref_instr *parent)
{
- nir_deref_instr *parent = nir_src_as_deref(cast->parent);
- if (!parent)
- return false;
+ nir_foreach_use(use, &parent->dest.ssa) {
+ /* Only deref instructions derive their type from their parent;
+  * other users (loads, stores, intrinsics) are left alone.
+  */
+ if (use->parent_instr->type != nir_instr_type_deref)
+ continue;
- return cast->mode == parent->mode &&
- cast->type == parent->type &&
- cast->dest.ssa.num_components == parent->dest.ssa.num_components &&
- cast->dest.ssa.bit_size == parent->dest.ssa.bit_size;
+ nir_deref_instr *child = nir_instr_as_deref(use->parent_instr);
+ switch (child->deref_type) {
+ case nir_deref_type_var:
+ unreachable("nir_deref_type_var cannot be a child");
+
+ case nir_deref_type_array:
+ case nir_deref_type_array_wildcard:
+ child->type = glsl_get_array_element(parent->type);
+ break;
+
+ case nir_deref_type_ptr_as_array:
+ /* A ptr_as_array deref has the same element type as its parent */
+ child->type = parent->type;
+ break;
+
+ case nir_deref_type_struct:
+ child->type = glsl_get_struct_field(parent->type,
+ child->strct.index);
+ break;
+
+ case nir_deref_type_cast:
+ /* We stop the recursion here */
+ continue;
+ }
+
+ /* Recurse into children */
+ nir_deref_instr_fixup_child_types(child);
+ }
}
static bool
return true;
}
+/* Removes a cast whose destination type is a bare sampler (possibly wrapped
+ * in arrays) when the parent is a more detailed sampler type with matching
+ * array dimensions.  The cast's uses are rewritten to the parent directly
+ * and child deref types are fixed up.  Returns true on progress.
+ */
+static bool
+opt_remove_sampler_cast(nir_deref_instr *cast)
+{
+ assert(cast->deref_type == nir_deref_type_cast);
+ nir_deref_instr *parent = nir_src_as_deref(cast->parent);
+ if (parent == NULL)
+ return false;
+
+ /* Strip both types down to their non-array type and bail if there are any
+ * discrepancies in array lengths.
+ */
+ const struct glsl_type *parent_type = parent->type;
+ const struct glsl_type *cast_type = cast->type;
+ while (glsl_type_is_array(parent_type) && glsl_type_is_array(cast_type)) {
+ if (glsl_get_length(parent_type) != glsl_get_length(cast_type))
+ return false;
+ parent_type = glsl_get_array_element(parent_type);
+ cast_type = glsl_get_array_element(cast_type);
+ }
+
+ /* Bail if the two types have different array depth */
+ if (glsl_type_is_array(parent_type) || glsl_type_is_array(cast_type))
+ return false;
+
+ if (!glsl_type_is_sampler(parent_type) ||
+ cast_type != glsl_bare_sampler_type())
+ return false;
+
+ /* We're a cast from a more detailed sampler type to a bare sampler */
+ nir_ssa_def_rewrite_uses(&cast->dest.ssa,
+ nir_src_for_ssa(&parent->dest.ssa));
+ nir_instr_remove(&cast->instr);
+
+ /* Recursively crawl the deref tree and clean up types, since children of
+  * the removed cast now derive their type from parent instead.
+  */
+ nir_deref_instr_fixup_child_types(parent);
+
+ return true;
+}
+
/**
* Is this casting a struct to a contained struct.
* struct a { struct b field0 };
if (opt_replace_struct_wrapper_cast(b, cast))
return true;
+ if (opt_remove_sampler_cast(cast))
+ return true;
+
progress = opt_remove_cast_cast(cast);
if (!is_trivial_deref_cast(cast))
return progress;
/* If uses would be a bit crazy */
assert(list_is_empty(&cast->dest.ssa.if_uses));
- nir_deref_instr_remove_if_unused(cast);
+ if (nir_deref_instr_remove_if_unused(cast))
+ progress = true;
+
return progress;
}
nir_metadata_preserve(impl, nir_metadata_block_index |
nir_metadata_dominance);
} else {
-#ifndef NDEBUG
- impl->valid_metadata &= ~nir_metadata_not_properly_reset;
-#endif
+ nir_metadata_preserve(impl, nir_metadata_all);
}
return progress;