/* Memory access. */
struct GTY(()) modref_access_node
{
+
+ /* Access range information (in bits). */
+ poly_int64 offset;
+ poly_int64 size;
+ poly_int64 max_size;
+
+ /* Offset from parameter pointer to the base of the access (in bytes). */
+ poly_int64 parm_offset;
+
/* Index of parameter which specifies the base of access. -1 if base is not
a function parameter. */
int parm_index;
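+ /* True if parm_offset is known. */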
+ bool parm_offset_known;
/* Return true if access node holds some useful info. */
- bool useful_p ()
+ bool useful_p () const
{
return parm_index != -1;
}
+ /* Return true if range info is useful. */
+ bool range_info_useful_p () const
+ {
+ return parm_index != -1 && parm_offset_known;
+ }
+ /* Return true if both accesses are the same. */
+ bool operator == (const modref_access_node &a) const
+ {
+ if (parm_index != a.parm_index)
+ return false;
+ if (parm_index >= 0)
+ {
+ if (parm_offset_known != a.parm_offset_known)
+ return false;
+ if (parm_offset_known
+ && !known_eq (parm_offset, a.parm_offset))
+ return false;
+ }
+ if (range_info_useful_p ()
+ && (!known_eq (a.offset, offset)
+ || !known_eq (a.size, size)
+ || !known_eq (a.max_size, max_size)))
+ return false;
+ return true;
+ }
};
+/* Access node specifying no useful info. */
+const modref_access_node unspecified_modref_access_node
+ = {0, -1, -1, 0, -1, false};
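+/* The initializer above follows member declaration order: {offset, size,
+   max_size, parm_offset, parm_index, parm_offset_known}, so parm_index
+   is -1 and useful_p () is false.  */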
+
template <typename T>
struct GTY((user)) modref_ref_node
{
size_t i;
modref_access_node *a;
FOR_EACH_VEC_SAFE_ELT (accesses, i, a)
- if (a->parm_index == access.parm_index)
+ if (*a == access)
return a;
return NULL;
}
}
};
+/* Map translating parameters across a function call. */
+
+struct modref_parm_map
+{
+ /* Index of parameter we translate to.
+    -1 indicates that the parameter is unknown;
+    -2 indicates that the parameter points to local memory and the access
+    can be discarded. */
+ int parm_index;
+ bool parm_offset_known;
+ poly_int64 parm_offset;
+};
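+/* For example, if argument 0 of a call is the caller's parameter 1 with
+   a byte offset of 4, then parm_map[0] == {1, true, 4}; merging (see
+   modref_tree::merge below) then turns a callee access with parm_index 0
+   and parm_offset 8 into parm_index 1 with parm_offset 12.  */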
+
/* Access tree for a single function. */
template <typename T>
struct GTY((user)) modref_tree
PARM_MAP, if non-NULL, maps parm indexes of the callee to the caller. -2 is
used to signal that the parameter is local and does not need to be
tracked. Return true if something has changed. */
- bool merge (modref_tree <T> *other, vec <int> *parm_map)
+ bool merge (modref_tree <T> *other, vec <modref_parm_map> *parm_map)
{
if (!other || every_base)
return false;
{
if (ref_node->every_access)
{
- modref_access_node a = {-1};
- changed |= insert (base_node->base, ref_node->ref, a);
+ changed |= insert (base_node->base,
+ ref_node->ref,
+ unspecified_modref_access_node);
}
else
FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
{
modref_access_node a = *access_node;
+
if (a.parm_index != -1 && parm_map)
{
if (a.parm_index >= (int)parm_map->length ())
a.parm_index = -1;
- else if ((*parm_map) [a.parm_index] == -2)
+ else if ((*parm_map) [a.parm_index].parm_index == -2)
continue;
else
- a.parm_index = (*parm_map) [a.parm_index];
+ {
+ a.parm_offset
+ += (*parm_map) [a.parm_index].parm_offset;
+ a.parm_offset_known
+ &= (*parm_map)
+ [a.parm_index].parm_offset_known;
+ a.parm_index
+ = (*parm_map) [a.parm_index].parm_index;
+ }
}
changed |= insert (base_node->base, ref_node->ref, a);
}
static void
dump_access (modref_access_node *a, FILE *out)
{
- fprintf (out, " Parm %i\n", a->parm_index);
+ fprintf (out, " access:");
+ if (a->parm_index != -1)
+ {
+ fprintf (out, " Parm %i", a->parm_index);
+ if (a->parm_offset_known)
+ {
+ fprintf (out, " param offset:");
+ print_dec ((poly_int64_pod)a->parm_offset, out, SIGNED);
+ }
+ }
+ if (a->range_info_useful_p ())
+ {
+ fprintf (out, " offset:");
+ print_dec ((poly_int64_pod)a->offset, out, SIGNED);
+ fprintf (out, " size:");
+ print_dec ((poly_int64_pod)a->size, out, SIGNED);
+ fprintf (out, " max_size:");
+ print_dec ((poly_int64_pod)a->max_size, out, SIGNED);
+ }
+ fprintf (out, "\n");
}
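/* For instance, an access through parameter 0 at byte offset 4 that loads
   32 bits is dumped as
     access: Parm 0 param offset:4 offset:0 size:32 max_size:32  */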
/* Dump records TT to OUT. */
static modref_access_node
get_access (ao_ref *ref)
{
- modref_access_node a;
tree base;
- base = ref->ref;
- while (handled_component_p (base))
- base = TREE_OPERAND (base, 0);
+ base = ao_ref_base (ref);
+ modref_access_node a = {ref->offset, ref->size, ref->max_size,
+ 0, -1, false};
if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
{
+ tree offset = TREE_CODE (base) == MEM_REF
+ ? TREE_OPERAND (base, 1) : NULL_TREE;
base = TREE_OPERAND (base, 0);
if (TREE_CODE (base) == SSA_NAME
&& SSA_NAME_IS_DEFAULT_DEF (base)
}
a.parm_index++;
}
+ a.parm_offset_known
+ = offset && wi::to_poly_offset (offset).to_shwi (&a.parm_offset);
}
else
a.parm_index = -1;
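/* For instance, for a read of *(int *)(p + 4), where p is the function's
   first parameter, ao_ref_base yields a MEM_REF whose second operand is
   the byte offset 4, so the node gets parm_index 0, parm_offset 4 and
   the bit range copied from the ao_ref.  */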
gimple *stmt, modref_summary *callee_summary,
bool ignore_stores)
{
- auto_vec <int, 32> parm_map;
+ auto_vec <modref_parm_map, 32> parm_map;
bool changed = false;
parm_map.safe_grow (gimple_call_num_args (stmt));
}
index++;
}
- parm_map[i] = index;
+ parm_map[i].parm_index = index;
+ parm_map[i].parm_offset_known = true;
+ parm_map[i].parm_offset = 0;
}
else if (points_to_local_or_readonly_memory_p (op))
- parm_map[i] = -2;
+ parm_map[i].parm_index = -2;
else
- parm_map[i] = -1;
+ parm_map[i].parm_index = -1;
}
/* Merge with callee's summary. */
size_t k;
modref_access_node *access_node;
FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
- streamer_write_uhwi (ob, access_node->parm_index);
+ {
+ streamer_write_uhwi (ob, access_node->parm_index);
+ if (access_node->parm_index != -1)
+ {
+ streamer_write_uhwi (ob, access_node->parm_offset_known);
+ if (access_node->parm_offset_known)
+ {
+ streamer_write_poly_int64 (ob, access_node->parm_offset);
+ streamer_write_poly_int64 (ob, access_node->offset);
+ streamer_write_poly_int64 (ob, access_node->size);
+ streamer_write_poly_int64 (ob, access_node->max_size);
+ }
+ }
+ }
}
}
}
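/* The streamed layout per access thus is: parm_index; if it is not -1,
   parm_offset_known; and only when the offset is known, parm_offset,
   offset, size and max_size.  The reader below restores the same fields,
   leaving size and max_size at -1 when no range was streamed.  */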
for (size_t k = 0; k < naccesses; k++)
{
int parm_index = streamer_read_uhwi (ib);
- modref_access_node a = {parm_index};
+ bool parm_offset_known = false;
+ poly_int64 parm_offset = 0;
+ poly_int64 offset = 0;
+ poly_int64 size = -1;
+ poly_int64 max_size = -1;
+
+ if (parm_index != -1)
+ {
+ parm_offset_known = streamer_read_uhwi (ib);
+ if (parm_offset_known)
+ {
+ parm_offset = streamer_read_poly_int64 (ib);
+ offset = streamer_read_poly_int64 (ib);
+ size = streamer_read_poly_int64 (ib);
+ max_size = streamer_read_poly_int64 (ib);
+ }
+ }
+ modref_access_node a = {offset, size, max_size, parm_offset,
+ parm_index, parm_offset_known};
if (nolto_ref_node)
nolto_ref_node->insert_access (a, max_accesses);
if (lto_ref_node)
/* Compute parm_map for CALLEE_EDGE. */
static void
-compute_parm_map (cgraph_edge *callee_edge, vec<int> *parm_map)
+compute_parm_map (cgraph_edge *callee_edge, vec<modref_parm_map> *parm_map)
{
class ipa_edge_args *args;
if (ipa_node_params_sum
{
if (es && es->param[i].points_to_local_or_readonly_memory)
{
- (*parm_map)[i] = -2;
+ (*parm_map)[i].parm_index = -2;
continue;
}
(callee_pi, i));
if (cst && points_to_local_or_readonly_memory_p (cst))
{
- (*parm_map)[i] = -2;
+ (*parm_map)[i].parm_index = -2;
continue;
}
}
if (jf && jf->type == IPA_JF_PASS_THROUGH)
{
- (*parm_map)[i]
+ (*parm_map)[i].parm_index
= ipa_get_jf_pass_through_formal_id (jf);
+ (*parm_map)[i].parm_offset_known
+ = ipa_get_jf_pass_through_operation (jf) == NOP_EXPR;
+ (*parm_map)[i].parm_offset = 0;
continue;
}
if (jf && jf->type == IPA_JF_ANCESTOR)
- (*parm_map)[i] = ipa_get_jf_ancestor_formal_id (jf);
+ {
+ (*parm_map)[i].parm_index = ipa_get_jf_ancestor_formal_id (jf);
+ (*parm_map)[i].parm_offset_known = true;
+ (*parm_map)[i].parm_offset = ipa_get_jf_ancestor_offset (jf);
+ }
else
- (*parm_map)[i] = -1;
+ (*parm_map)[i].parm_index = -1;
}
if (dump_file)
{
fprintf (dump_file, " Parm map: ");
for (i = 0; i < count; i++)
- fprintf (dump_file, " %i", (*parm_map)[i]);
+ fprintf (dump_file, " %i", (*parm_map)[i].parm_index);
fprintf (dump_file, "\n");
}
}
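/* For instance, a call whose first argument is a pass-through of the
   caller's parameter 0 and whose second argument points to local memory
   is dumped as
     Parm map:  0 -2  */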
}
else
{
- auto_vec <int, 32> parm_map;
+ auto_vec <modref_parm_map, 32> parm_map;
compute_parm_map (edge, &parm_map);
}
- auto_vec <int, 32> parm_map;
+ auto_vec <modref_parm_map, 32> parm_map;
compute_parm_map (callee_edge, &parm_map);
}
/* TBAA checks did not disambiguate, try to use base pointer, for
- that we however need to have ref->ref. */
- if (ref_node->every_access || !ref->ref)
+ that we, however, need to have ref->ref or ref->base. */
+ if (ref_node->every_access || (!ref->ref && !ref->base))
return true;
modref_access_node *access_node;
>= gimple_call_num_args (stmt))
return true;
-
alias_stats.modref_baseptr_tests++;
- if (ptr_deref_may_alias_ref_p_1
- (gimple_call_arg (stmt, access_node->parm_index), ref))
+ tree arg = gimple_call_arg (stmt, access_node->parm_index);
+
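+ /* A literal NULL argument cannot point to any object when NULL
+ pointer dereferences are assumed to be undefined. */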
+ if (integer_zerop (arg) && flag_delete_null_pointer_checks)
+ continue;
+
+ if (!POINTER_TYPE_P (TREE_TYPE (arg)))
return true;
+
+ /* ao_ref_init_from_ptr_and_range assumes that the memory access
+ starts at the pointed-to location.  If we did not track the
+ offset, it is possible that the access starts before the
+ pointer.  */
+ if (!access_node->parm_offset_known)
+ {
+ if (ptr_deref_may_alias_ref_p_1 (arg, ref))
+ return true;
+ }
+ else
+ {
+ ao_ref ref2;
+
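+ /* parm_offset is in bytes while offset, size and max_size are in
+ bits; convert before forming the range. */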
+ ao_ref_init_from_ptr_and_range
+ (&ref2, arg, true,
+ access_node->offset
+ + (access_node->parm_offset
+ << LOG2_BITS_PER_UNIT), access_node->size,
+ access_node->max_size);
+ ref2.ref_alias_set = ref_set;
+ ref2.base_alias_set = base_set;
+ if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
+ return true;
+ }
num_tests++;
}
}