/* SSA operands management for trees.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
-#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-
+#include "toplev.h"
#include "langhooks.h"
+#include "ipa-reference.h"
/* This file contains the code required to manage the operands cache of the
SSA optimizer. For every stmt, we maintain an operand cache in the stmt
annotation.
The operand tree is then parsed by the various get_* routines which look
through the stmt tree for the occurrence of operands which may be of
interest, and calls are made to the append_* routines whenever one is
- found. There are 5 of these routines, each representing one of the
- 5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
- Virtual Must Defs.
+ found. There are 4 of these routines, each representing one of the
+ 4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
The append_* routines check for duplication, and simply keep a list of
unique objects for each operand type in the build_* extendable vectors.
Once the stmt tree is completely parsed, the finalize_ssa_operands()
routine is called, which proceeds to perform the finalization routine
- on each of the 5 operand vectors which have been built up.
+ on each of the 4 operand vectors which have been built up.
If the stmt had a previous operand cache, the finalization routines
attempt to match up the new operands with the old ones. If it's a perfect
match, the old operand is reused and its SSA_NAME is preserved;
i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new operand
vector for VUSE, then the new vector will also be modified such that
- it contains 'a_5' rather than 'a'.
-*/
+ it contains 'a_5' rather than 'a'. */
+
+
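+/* A rough sketch of the flow described above (illustrative only; the
+ driver sequence and the GIMPLE_STMT_OPERAND accessors here are
+ assumptions, not part of this patch):
+
+ start_ssa_stmt_operands ();
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (stmt, 0), opf_def);
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (stmt, 1), opf_use);
+ finalize_ssa_stmt_operands (stmt); */
+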
+/* Structure storing statistics on how many call clobbers we have, and
+ how many were avoided. */
+
+static struct
+{
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobbered_mem_symbols. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (VDEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_mem_symbols. */
+ unsigned int readonly_clobbers;
+
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
-#define opf_none 0
+#define opf_use 0
/* Operand is the target of an assignment expression or a
- call-clobbered variable */
-#define opf_is_def (1 << 0)
-
-/* Operand is the target of an assignment expression. */
-#define opf_kill_def (1 << 1)
+ call-clobbered variable. */
+#define opf_def (1 << 0)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
other expressions. Inside an ADDR_EXPR node, the only operands that we
need to consider are indices into arrays. For instance, &a.b[i] should
generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
VUSE for 'b'. */
-#define opf_no_vops (1 << 2)
+#define opf_no_vops (1 << 1)
+
+/* Operand is an implicit reference. This is used to distinguish
+ explicit assignments in the form of GIMPLE_MODIFY_STMT from
+ clobbering sites like function calls or ASM_EXPRs. */
+#define opf_implicit (1 << 2)
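+
+/* For example (hypothetical calls, using only the flags above): the LHS
+ of an assignment would be scanned with
+
+ get_expr_operands (stmt, &lhs_ptr, opf_def);
+
+ where LHS_PTR is a stand-in for the pointer to the LHS operand, while
+ operands found inside an ADDR_EXPR would be scanned with
+ opf_use | opf_no_vops so that only real operands such as array
+ indices are added. */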
/* Array for building all the def operands. */
-static GTY (()) varray_type build_defs;
+static VEC(tree,heap) *build_defs;
/* Array for building all the use operands. */
-static GTY (()) varray_type build_uses;
-
-/* Array for building all the v_may_def operands. */
-static GTY (()) varray_type build_v_may_defs;
+static VEC(tree,heap) *build_uses;
-/* Array for building all the vuse operands. */
-static GTY (()) varray_type build_vuses;
+/* Set for building all the VDEF operands. */
+static VEC(tree,heap) *build_vdefs;
-/* Array for building all the v_must_def operands. */
-static GTY (()) varray_type build_v_must_defs;
+/* Set for building all the VUSE operands. */
+static VEC(tree,heap) *build_vuses;
-/* True if the operands for call clobbered vars are cached and valid. */
-bool ssa_call_clobbered_cache_valid;
-bool ssa_ro_call_cache_valid;
+/* Bitmap obstack for our data structures that need to survive across
+ compilations of multiple functions. */
+static bitmap_obstack operands_bitmap_obstack;
-/* These arrays are the cached operand vectors for call clobbered calls. */
-static GTY (()) varray_type clobbered_v_may_defs;
-static GTY (()) varray_type clobbered_vuses;
-static GTY (()) varray_type ro_call_vuses;
-static bool clobbered_aliased_loads;
-static bool clobbered_aliased_stores;
-static bool ro_call_aliased_loads;
-static stmt_operands_p parse_old_ops = NULL;
+/* Set for building all the loaded symbols. */
+static bitmap build_loads;
-def_operand_p NULL_DEF_OPERAND_P = { NULL };
+/* Set for building all the stored symbols. */
+static bitmap build_stores;
-static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
-static void get_asm_expr_operands (tree);
-static void get_indirect_ref_operands (tree, tree, int);
-static void get_call_expr_operands (tree, tree);
-static inline void append_def (tree *);
-static inline void append_use (tree *);
-static void append_v_may_def (tree);
-static void append_v_must_def (tree);
-static void add_call_clobber_ops (tree);
-static void add_call_read_ops (tree);
-static void add_stmt_operand (tree *, stmt_ann_t, int);
-/* Return a vector of contiguous memory for NUM def operands. */
+/* Number of functions with initialized ssa_operands. */
+static int n_initialized = 0;
-static inline def_optype
-allocate_def_optype (unsigned num)
-{
- def_optype def_ops;
- unsigned size;
- size = sizeof (struct def_optype_d) + sizeof (tree *) * (num - 1);
- def_ops = ggc_alloc (size);
- def_ops->num_defs = num;
- return def_ops;
-}
+/* Statement change buffer. Data structure used to record state
+ information for statements. This is used to determine what needs
+ to be done in order to update the SSA web after a statement is
+ modified by a pass. If STMT is a statement that has just been
+ created, or needs to be folded via fold_stmt, or anything that
+ changes its physical structure, then the pass should:
+ 1- Call push_stmt_changes (&stmt) to record the current state of
+ STMT before any modifications are made.
-/* Return a vector of contiguous memory for NUM use operands. */
-
-static inline use_optype
-allocate_use_optype (unsigned num)
-{
- use_optype use_ops;
- unsigned size;
- size = sizeof (struct use_optype_d) + sizeof (use_operand_type_t) * (num - 1);
- use_ops = ggc_alloc (size);
- use_ops->num_uses = num;
- return use_ops;
-}
+ 2- Make all appropriate modifications to the statement.
+ 3- Call pop_stmt_changes (&stmt) to find new symbols that
+ need to be put in SSA form, SSA name mappings for names that
+ have disappeared, recompute invariantness for address
+ expressions, cleanup EH information, etc.
-/* Return a vector of contiguous memory for NUM v_may_def operands. */
+ If it is possible to determine that the statement was not modified,
+ instead of calling pop_stmt_changes it is quicker to call
+ discard_stmt_changes to avoid the expensive and unnecessary operand
+ re-scan and change comparison. */
-static inline v_may_def_optype
-allocate_v_may_def_optype (unsigned num)
+struct scb_d
{
- v_may_def_optype v_may_def_ops;
- unsigned size;
- size = sizeof (struct v_may_def_optype_d)
- + sizeof (v_def_use_operand_type_t) * (num - 1);
- v_may_def_ops = ggc_alloc (size);
- v_may_def_ops->num_v_may_defs = num;
- return v_may_def_ops;
-}
+ /* Pointer to the statement being modified. */
+ tree *stmt_p;
+
+ /* If the statement references memory these are the sets of symbols
+ loaded and stored by the statement. */
+ bitmap loads;
+ bitmap stores;
+};
+
+typedef struct scb_d *scb_t;
+DEF_VEC_P(scb_t);
+DEF_VEC_ALLOC_P(scb_t,heap);
+/* Stack of statement change buffers (SCB). Every call to
+ push_stmt_changes pushes a new buffer onto the stack. Calls to
+ pop_stmt_changes pop a buffer off of the stack and compute the set
+ of changes for the popped statement. */
+static VEC(scb_t,heap) *scb_stack;
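+
+/* A minimal usage sketch of the protocol described above, as it might
+ appear in a pass (hypothetical code, not part of this patch):
+
+ push_stmt_changes (&stmt);
+ ... transform stmt ...
+ if (stmt_was_changed)
+ pop_stmt_changes (&stmt);
+ else
+ discard_stmt_changes (&stmt);
+
+ STMT_WAS_CHANGED is a stand-in for whatever the pass tracks. */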
-/* Return a vector of contiguous memory for NUM v_use operands. */
+/* Return the DECL_UID of the base variable of T. */
-static inline vuse_optype
-allocate_vuse_optype (unsigned num)
+static inline unsigned
+get_name_decl (tree t)
{
- vuse_optype vuse_ops;
- unsigned size;
- size = sizeof (struct vuse_optype_d)
- + sizeof (vuse_operand_type_t) * (num - 1);
- vuse_ops = ggc_alloc (size);
- vuse_ops->num_vuses = num;
- return vuse_ops;
+ if (TREE_CODE (t) != SSA_NAME)
+ return DECL_UID (t);
+ else
+ return DECL_UID (SSA_NAME_VAR (t));
}
-/* Return a vector of contiguous memory for NUM v_must_def operands. */
+/* Comparison function for qsort used in operand_build_sort_virtual. */
-static inline v_must_def_optype
-allocate_v_must_def_optype (unsigned num)
+static int
+operand_build_cmp (const void *p, const void *q)
{
- v_must_def_optype v_must_def_ops;
- unsigned size;
- size = sizeof (struct v_must_def_optype_d) + sizeof (v_def_use_operand_type_t) * (num - 1);
- v_must_def_ops = ggc_alloc (size);
- v_must_def_ops->num_v_must_defs = num;
- return v_must_def_ops;
+ tree e1 = *((const tree *)p);
+ tree e2 = *((const tree *)q);
+ unsigned int u1,u2;
+
+ u1 = get_name_decl (e1);
+ u2 = get_name_decl (e2);
+
+ /* We want to sort in ascending order. They can never be equal. */
+#ifdef ENABLE_CHECKING
+ gcc_assert (u1 != u2);
+#endif
+ return (u1 > u2 ? 1 : -1);
}
-/* Free memory for USES. */
+/* Sort the virtual operands in LIST from lowest DECL_UID to highest. */
static inline void
-free_uses (use_optype *uses)
+operand_build_sort_virtual (VEC(tree,heap) *list)
{
- if (*uses)
+ int num = VEC_length (tree, list);
+
+ if (num < 2)
+ return;
+
+ if (num == 2)
{
- unsigned int x;
- use_optype use = *uses;
- for (x = 0; x < use->num_uses; x++)
- delink_imm_use (&(use->uses[x]));
- ggc_free (*uses);
- *uses = NULL;
+ if (get_name_decl (VEC_index (tree, list, 0))
+ > get_name_decl (VEC_index (tree, list, 1)))
+ {
+ /* Swap elements if in the wrong order. */
+ tree tmp = VEC_index (tree, list, 0);
+ VEC_replace (tree, list, 0, VEC_index (tree, list, 1));
+ VEC_replace (tree, list, 1, tmp);
+ }
+ return;
}
+
+ /* There are 3 or more elements, call qsort. */
+ qsort (VEC_address (tree, list),
+ VEC_length (tree, list),
+ sizeof (tree),
+ operand_build_cmp);
}
-/* Free memory for DEFS. */
+/* Return true if the SSA operands cache is active. */
-static inline void
-free_defs (def_optype *defs)
+bool
+ssa_operands_active (void)
{
- if (*defs)
- {
- ggc_free (*defs);
- *defs = NULL;
- }
+ return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
}
-/* Free memory for VUSES. */
+/* VOPs are of variable size, so the free list maps "free buckets" to the
+ following table:
+ bucket # operands
+ ------ ----------
+ 0 1
+ 1 2
+ ...
+ 15 16
+ 16 17-24
+ 17 25-32
+ 18 33-40
+ ...
+ 29 121-128
+ Any VOPs larger than this are simply added to the largest bucket when they
+ are freed. */
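+
+/* As a worked example of the mapping above: a VOP with 20 operands
+ falls in bucket 14 + (20 - 1) / 8 == 16, which holds sizes 17-24;
+ see vop_free_bucket_index below. */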
-static inline void
-free_vuses (vuse_optype *vuses)
+
+/* Return the number of operands used in bucket BUCKET. */
+
+static inline int
+vop_free_bucket_size (int bucket)
{
- if (*vuses)
- {
- unsigned int x;
- vuse_optype vuse = *vuses;
- for (x = 0; x < vuse->num_vuses; x++)
- delink_imm_use (&(vuse->vuses[x].imm_use));
- ggc_free (*vuses);
- *vuses = NULL;
- }
+#ifdef ENABLE_CHECKING
+ gcc_assert (bucket >= 0 && bucket < NUM_VOP_FREE_BUCKETS);
+#endif
+ if (bucket < 16)
+ return bucket + 1;
+ return (bucket - 13) * 8;
+}
+
+
+/* For a vop of NUM operands, return the bucket NUM belongs to. If NUM is
+ beyond the end of the bucket table, return -1. */
+
+static inline int
+vop_free_bucket_index (int num)
+{
+ gcc_assert (num > 0 && NUM_VOP_FREE_BUCKETS > 16);
+
+ /* Sizes 1 through 16 use buckets 0-15. */
+ if (num <= 16)
+ return num - 1;
+ /* Buckets 16 - NUM_VOP_FREE_BUCKETS represent 8 unit chunks. */
+ num = 14 + (num - 1) / 8;
+ if (num >= NUM_VOP_FREE_BUCKETS)
+ return -1;
+ else
+ return num;
}
-/* Free memory for V_MAY_DEFS. */
+/* Initialize the VOP free buckets. */
static inline void
-free_v_may_defs (v_may_def_optype *v_may_defs)
+init_vop_buckets (void)
{
- if (*v_may_defs)
- {
- unsigned int x;
- v_may_def_optype v_may_def = *v_may_defs;
- for (x = 0; x < v_may_def->num_v_may_defs; x++)
- delink_imm_use (&(v_may_def->v_may_defs[x].imm_use));
- ggc_free (*v_may_defs);
- *v_may_defs = NULL;
- }
+ int x;
+
+ for (x = 0; x < NUM_VOP_FREE_BUCKETS; x++)
+ gimple_ssa_operands (cfun)->vop_free_buckets[x] = NULL;
}
-/* Free memory for V_MUST_DEFS. */
+/* Add PTR to the appropriate VOP bucket. */
static inline void
-free_v_must_defs (v_must_def_optype *v_must_defs)
+add_vop_to_freelist (voptype_p ptr)
{
- if (*v_must_defs)
- {
- unsigned int x;
- v_must_def_optype v_must_def = *v_must_defs;
- for (x = 0; x < v_must_def->num_v_must_defs; x++)
- delink_imm_use (&(v_must_def->v_must_defs[x].imm_use));
- ggc_free (*v_must_defs);
- *v_must_defs = NULL;
- }
+ int bucket = vop_free_bucket_index (VUSE_VECT_NUM_ELEM (ptr->usev));
+
+ /* Too large, use the largest bucket so it's not a complete throwaway. */
+ if (bucket == -1)
+ bucket = NUM_VOP_FREE_BUCKETS - 1;
+
+ ptr->next = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket] = ptr;
}
+
+/* These are the sizes of the operand memory buffer which gets allocated each
+ time more operand space is required. The final value is the amount that is
+ allocated every time after that. */
+
+#define OP_SIZE_INIT 0
+#define OP_SIZE_1 30
+#define OP_SIZE_2 110
+#define OP_SIZE_3 511
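+
+/* That is, ssa_operand_alloc below grows the buffer from 30 to 110 to
+ 511 voptype_d-sized units; every buffer allocated after that stays at
+ OP_SIZE_3 units (a reading of the code below, not a tuned guarantee). */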
/* Initialize the operand cache routines. */
void
init_ssa_operands (void)
{
- VARRAY_TREE_PTR_INIT (build_defs, 5, "build defs");
- VARRAY_TREE_PTR_INIT (build_uses, 10, "build uses");
- VARRAY_TREE_INIT (build_v_may_defs, 10, "build v_may_defs");
- VARRAY_TREE_INIT (build_vuses, 10, "build vuses");
- VARRAY_TREE_INIT (build_v_must_defs, 10, "build v_must_defs");
+ if (!n_initialized++)
+ {
+ build_defs = VEC_alloc (tree, heap, 5);
+ build_uses = VEC_alloc (tree, heap, 10);
+ build_vuses = VEC_alloc (tree, heap, 25);
+ build_vdefs = VEC_alloc (tree, heap, 25);
+ bitmap_obstack_initialize (&operands_bitmap_obstack);
+ build_loads = BITMAP_ALLOC (&operands_bitmap_obstack);
+ build_stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+ scb_stack = VEC_alloc (scb_t, heap, 20);
+ }
+
+ gcc_assert (gimple_ssa_operands (cfun)->operand_memory == NULL);
+ gcc_assert (gimple_ssa_operands (cfun)->mpt_table == NULL);
+ gimple_ssa_operands (cfun)->operand_memory_index
+ = gimple_ssa_operands (cfun)->ssa_operand_mem_size;
+ gimple_ssa_operands (cfun)->ops_active = true;
+ memset (&clobber_stats, 0, sizeof (clobber_stats));
+ init_vop_buckets ();
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size = OP_SIZE_INIT;
}
void
fini_ssa_operands (void)
{
- ggc_free (build_defs);
- ggc_free (build_uses);
- ggc_free (build_v_may_defs);
- ggc_free (build_vuses);
- ggc_free (build_v_must_defs);
- build_defs = NULL;
- build_uses = NULL;
- build_v_may_defs = NULL;
- build_vuses = NULL;
- build_v_must_defs = NULL;
- if (clobbered_v_may_defs)
+ struct ssa_operand_memory_d *ptr;
+ unsigned ix;
+ tree mpt;
+
+ if (!--n_initialized)
{
- ggc_free (clobbered_v_may_defs);
- ggc_free (clobbered_vuses);
- clobbered_v_may_defs = NULL;
- clobbered_vuses = NULL;
+ VEC_free (tree, heap, build_defs);
+ VEC_free (tree, heap, build_uses);
+ VEC_free (tree, heap, build_vdefs);
+ VEC_free (tree, heap, build_vuses);
+ BITMAP_FREE (build_loads);
+ BITMAP_FREE (build_stores);
+
+ /* The change buffer stack had better be empty. */
+ gcc_assert (VEC_length (scb_t, scb_stack) == 0);
+ VEC_free (scb_t, heap, scb_stack);
+ scb_stack = NULL;
}
- if (ro_call_vuses)
+
+ gimple_ssa_operands (cfun)->free_defs = NULL;
+ gimple_ssa_operands (cfun)->free_uses = NULL;
+
+ while ((ptr = gimple_ssa_operands (cfun)->operand_memory) != NULL)
{
- ggc_free (ro_call_vuses);
- ro_call_vuses = NULL;
+ gimple_ssa_operands (cfun)->operand_memory
+ = gimple_ssa_operands (cfun)->operand_memory->next;
+ ggc_free (ptr);
}
-}
-/* Initialize V_USES index INDEX to VAL for STMT. If OLD is present, preserve
- the position of the may-def in the immediate_use list. */
+ for (ix = 0;
+ VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, ix, mpt);
+ ix++)
+ {
+ if (mpt)
+ BITMAP_FREE (MPT_SYMBOLS (mpt));
+ }
-static inline void
-initialize_vuse_operand (vuse_optype vuses, unsigned int index, tree val,
- tree stmt, ssa_imm_use_t *old)
-{
- vuse_operand_type_t *ptr;
- ptr = &(vuses->vuses[index]);
- ptr->use = val;
- ptr->imm_use.use = &(ptr->use);
- if (old)
- relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
- else
- link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
+ VEC_free (tree, heap, gimple_ssa_operands (cfun)->mpt_table);
+
+ gimple_ssa_operands (cfun)->ops_active = false;
+
+ if (!n_initialized)
+ bitmap_obstack_release (&operands_bitmap_obstack);
+ if (dump_file && (dump_flags & TDF_STATS))
+ {
+ fprintf (dump_file, "Original clobbered vars: %d\n",
+ clobber_stats.clobbered_vars);
+ fprintf (dump_file, "Static write clobbers avoided: %d\n",
+ clobber_stats.static_write_clobbers_avoided);
+ fprintf (dump_file, "Static read clobbers avoided: %d\n",
+ clobber_stats.static_read_clobbers_avoided);
+ fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
+ clobber_stats.unescapable_clobbers_avoided);
+ fprintf (dump_file, "Original read-only clobbers: %d\n",
+ clobber_stats.readonly_clobbers);
+ fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
+ clobber_stats.static_readonly_clobbers_avoided);
+ }
}
-/* Initialize V_MAY_DEF_OPS index X to be DEF = MAY_DEF <USE> for STMT. If
- OLD is present, preserve the position of the may-def in the immediate_use
- list. */
+/* Return a chunk of SIZE bytes from the operand memory buffer. */
+
+static inline void *
+ssa_operand_alloc (unsigned size)
+{
+ char *ptr;
-static inline void
-initialize_v_may_def_operand (v_may_def_optype v_may_def_ops, unsigned int x,
- tree def, tree use, tree stmt, ssa_imm_use_t *old)
-{
- v_def_use_operand_type_t *ptr;
- ptr = &(v_may_def_ops->v_may_defs[x]);
- ptr->def = def;
- ptr->use = use;
- ptr->imm_use.use = &(ptr->use);
- if (old)
- relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
- else
- link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
+ if (gimple_ssa_operands (cfun)->operand_memory_index + size
+ >= gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ {
+ struct ssa_operand_memory_d *ptr;
+
+ if (gimple_ssa_operands (cfun)->ssa_operand_mem_size == OP_SIZE_INIT)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_1 * sizeof (struct voptype_d);
+ else
+ if (gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ == OP_SIZE_1 * sizeof (struct voptype_d))
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_2 * sizeof (struct voptype_d);
+ else
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_3 * sizeof (struct voptype_d);
+
+ /* Go right to the maximum size if the request is too large. */
+ if (size > gimple_ssa_operands (cfun)->ssa_operand_mem_size)
+ gimple_ssa_operands (cfun)->ssa_operand_mem_size
+ = OP_SIZE_3 * sizeof (struct voptype_d);
+
+ /* Fail if there is not enough space. If there are this many operands
+ required, first make sure there isn't a different problem causing this
+ many operands. If the decision is that this is OK, then we can
+ specially allocate a buffer just for this request. */
+ gcc_assert (size <= gimple_ssa_operands (cfun)->ssa_operand_mem_size);
+
+ ptr = (struct ssa_operand_memory_d *)
+ ggc_alloc (sizeof (struct ssa_operand_memory_d)
+ + gimple_ssa_operands (cfun)->ssa_operand_mem_size - 1);
+ ptr->next = gimple_ssa_operands (cfun)->operand_memory;
+ gimple_ssa_operands (cfun)->operand_memory = ptr;
+ gimple_ssa_operands (cfun)->operand_memory_index = 0;
+ }
+ ptr = &(gimple_ssa_operands (cfun)->operand_memory
+ ->mem[gimple_ssa_operands (cfun)->operand_memory_index]);
+ gimple_ssa_operands (cfun)->operand_memory_index += size;
+ return ptr;
}
-/* Initialize V_MUST_DEF_OPS index X to be DEF = MUST_DEF <USE> for STMT. If
- OLD is present, preserve the position of the may-def in the immediate_use
- list. */
+/* Allocate a DEF operand. */
-static inline void
-initialize_v_must_def_operand (v_must_def_optype v_must_def_ops, unsigned int x,
- tree def, tree use, tree stmt, ssa_imm_use_t *old)
-{
- v_def_use_operand_type_t *ptr;
- ptr = &(v_must_def_ops->v_must_defs[x]);
- ptr->def = def;
- ptr->use = use;
- ptr->imm_use.use = &(ptr->use);
- if (old)
- relink_imm_use_stmt (&(ptr->imm_use), old, stmt);
+static inline struct def_optype_d *
+alloc_def (void)
+{
+ struct def_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_defs)
+ {
+ ret = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs
+ = gimple_ssa_operands (cfun)->free_defs->next;
+ }
else
- link_imm_use_stmt (&(ptr->imm_use), ptr->use, stmt);
+ ret = (struct def_optype_d *)
+ ssa_operand_alloc (sizeof (struct def_optype_d));
+ return ret;
}
-/* All the finalize_ssa_* routines do the work required to turn the build_
- VARRAY into an operand_vector of the appropriate type. The original vector,
- if any, is passed in for comparison and virtual SSA_NAME reuse. If the
- old vector is reused, the pointer passed in is set to NULL so that
- the memory is not freed when the old operands are freed. */
-/* Return a new def operand vector for STMT, comparing to OLD_OPS_P. */
+/* Allocate a USE operand. */
-static def_optype
-finalize_ssa_defs (def_optype *old_ops_p, tree stmt)
+static inline struct use_optype_d *
+alloc_use (void)
{
- unsigned num, x;
- def_optype def_ops, old_ops;
- bool build_diff;
-
- num = VARRAY_ACTIVE_SIZE (build_defs);
- if (num == 0)
- return NULL;
+ struct use_optype_d *ret;
+ if (gimple_ssa_operands (cfun)->free_uses)
+ {
+ ret = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses
+ = gimple_ssa_operands (cfun)->free_uses->next;
+ }
+ else
+ ret = (struct use_optype_d *)
+ ssa_operand_alloc (sizeof (struct use_optype_d));
+ return ret;
+}
- /* There should only be a single real definition per assignment. */
- gcc_assert ((stmt && TREE_CODE (stmt) != MODIFY_EXPR) || num <= 1);
- old_ops = *old_ops_p;
+/* Allocate a vop with NUM elements. */
- /* Compare old vector and new array. */
- build_diff = true;
- if (stmt && old_ops && old_ops->num_defs == num)
- {
- build_diff = false;
- for (x = 0; x < num; x++)
- if (old_ops->defs[x].def != VARRAY_TREE_PTR (build_defs, x))
- {
- build_diff = true;
- break;
- }
- }
+static inline struct voptype_d *
+alloc_vop (int num)
+{
+ struct voptype_d *ret = NULL;
+ int alloc_size = 0;
- if (!build_diff)
+ int bucket = vop_free_bucket_index (num);
+ if (bucket != -1)
{
- def_ops = old_ops;
- *old_ops_p = NULL;
+ /* If there is a free operand, use it. */
+ if (gimple_ssa_operands (cfun)->vop_free_buckets[bucket] != NULL)
+ {
+ ret = gimple_ssa_operands (cfun)->vop_free_buckets[bucket];
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket] =
+ gimple_ssa_operands (cfun)->vop_free_buckets[bucket]->next;
+ }
+ else
+ alloc_size = vop_free_bucket_size (bucket);
}
else
- {
- def_ops = allocate_def_optype (num);
- for (x = 0; x < num ; x++)
- def_ops->defs[x].def = VARRAY_TREE_PTR (build_defs, x);
- }
+ alloc_size = num;
- VARRAY_POP_ALL (build_defs);
+ if (alloc_size > 0)
+ ret = (struct voptype_d *)ssa_operand_alloc (
+ sizeof (struct voptype_d) + (alloc_size - 1) * sizeof (vuse_element_t));
- return def_ops;
+ VUSE_VECT_NUM_ELEM (ret->usev) = num;
+ return ret;
}
-/* Make sure PTR is inn the correct immediate use list. Since uses are simply
- pointers into the stmt TREE, there is no way of telling if anyone has
- changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
- THe contents are different, but the the pointer is still the same. This
- routine will check to make sure PTR is in the correct list, and if it isn't
- put it in the correct list. We cannot simply check the previous node
- because all nodes in the same stmt might have be changed. */
+/* This routine makes sure that PTR is in an immediate use list, and makes
+ sure the stmt pointer is set to the current stmt. */
static inline void
-correct_use_link (ssa_imm_use_t *ptr, tree stmt)
+set_virtual_use_link (use_operand_p ptr, tree stmt)
{
- ssa_imm_use_t *prev;
- tree root;
-
- /* Fold_stmt () may have changed the stmt pointers. */
+ /* fold_stmt may have changed the stmt pointers. */
if (ptr->stmt != stmt)
ptr->stmt = stmt;
- prev = ptr->prev;
- if (prev)
- {
- bool stmt_mod = true;
- /* Find the first element which isn't a SAFE iterator, is in a different
- stmt, and is not a a modified stmt, That node is in the correct list,
- see if we are too. */
+ /* If this use isn't in a list, add it to the correct list. */
+ if (!ptr->prev)
+ link_imm_use (ptr, *(ptr->use));
+}
- while (stmt_mod)
- {
- while (prev->stmt == stmt || prev->stmt == NULL)
- prev = prev->prev;
- if (prev->use == NULL)
- stmt_mod = false;
- else
- if ((stmt_mod = stmt_modified_p (prev->stmt)))
- prev = prev->prev;
- }
- /* Get the ssa_name of the list the node is in. */
- if (prev->use == NULL)
- root = prev->stmt;
- else
- root = *(prev->use);
- /* If it's the right list, simply return. */
- if (root == *(ptr->use))
- return;
- }
- /* Its in the wrong list if we reach here. */
- delink_imm_use (ptr);
- link_imm_use (ptr, *(ptr->use));
+/* Adds OP to the list of defs after LAST. */
+
+static inline def_optype_p
+add_def_op (tree *op, def_optype_p last)
+{
+ def_optype_p new;
+
+ new = alloc_def ();
+ DEF_OP_PTR (new) = op;
+ last->next = new;
+ new->next = NULL;
+ return new;
}
-/* Return a new use operand vector for STMT, comparing to OLD_OPS_P. */
+/* Adds OP to the list of uses of statement STMT after LAST. */
-static use_optype
-finalize_ssa_uses (use_optype *old_ops_p, tree stmt)
+static inline use_optype_p
+add_use_op (tree stmt, tree *op, use_optype_p last)
{
- unsigned num, x, num_old, i;
- use_optype use_ops, old_ops;
- bool build_diff;
+ use_optype_p new;
+
+ new = alloc_use ();
+ USE_OP_PTR (new)->use = op;
+ link_imm_use_stmt (USE_OP_PTR (new), *op, stmt);
+ last->next = new;
+ new->next = NULL;
+ return new;
+}
- num = VARRAY_ACTIVE_SIZE (build_uses);
- if (num == 0)
- return NULL;
-#ifdef ENABLE_CHECKING
- {
- unsigned x;
- /* If the pointer to the operand is the statement itself, something is
- wrong. It means that we are pointing to a local variable. */
- for (x = 0; x < num; x++)
- gcc_assert (*(VARRAY_TREE_PTR (build_uses, x)) != stmt);
- }
-#endif
- old_ops = *old_ops_p;
- num_old = ((stmt && old_ops) ? old_ops->num_uses : 0);
+/* Return a virtual op pointer with NUM elements which are all initialized to OP
+ and are linked into the immediate uses for STMT. The new vop is appended
+ after PREV. */
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (stmt && old_ops && num_old == num)
- {
- build_diff = false;
- for (x = 0; x < num; x++)
- {
- tree *var_p = VARRAY_TREE_PTR (build_uses, x);
- tree *node = old_ops->uses[x].use;
- /* Check the pointer values to see if they are the same. */
- if (node != var_p)
- {
- build_diff = true;
- break;
- }
- }
- }
+static inline voptype_p
+add_vop (tree stmt, tree op, int num, voptype_p prev)
+{
+ voptype_p new;
+ int x;
- if (!build_diff)
- {
- use_ops = old_ops;
- *old_ops_p = NULL;
- for (i = 0; i < num_old; i++)
- correct_use_link (&(use_ops->uses[i]), stmt);
- }
- else
+ new = alloc_vop (num);
+ for (x = 0; x < num; x++)
{
- use_ops = allocate_use_optype (num);
- for (x = 0; x < num ; x++)
- {
- tree *var = VARRAY_TREE_PTR (build_uses, x);
- use_ops->uses[x].use = var;
- for (i = 0; i < num_old; i++)
- {
- ssa_imm_use_t *ptr = &(old_ops->uses[i]);
- if (ptr->use == var)
- {
- relink_imm_use_stmt (&(use_ops->uses[x]), ptr, stmt);
- correct_use_link (&(use_ops->uses[x]), stmt);
- break;
- }
- }
- if (i == num_old)
- link_imm_use_stmt (&(use_ops->uses[x]), *var, stmt);
- }
+ VUSE_OP_PTR (new, x)->prev = NULL;
+ SET_VUSE_OP (new, x, op);
+ VUSE_OP_PTR (new, x)->use = &new->usev.uses[x].use_var;
+ link_imm_use_stmt (VUSE_OP_PTR (new, x), new->usev.uses[x].use_var, stmt);
}
- VARRAY_POP_ALL (build_uses);
- return use_ops;
+ if (prev)
+ prev->next = new;
+ new->next = NULL;
+ return new;
+}
+
+
+/* Adds OP to the list of vuses of statement STMT after LAST, and moves
+ LAST to the new element. */
+
+static inline voptype_p
+add_vuse_op (tree stmt, tree op, int num, voptype_p last)
+{
+ voptype_p new = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new) = NULL_TREE;
+ return new;
}
-/* Return a new v_may_def operand vector for STMT, comparing to OLD_OPS_P. */
+/* Adds OP to the list of vdefs of statement STMT after LAST, and moves
+ LAST to the new element. */
+
+static inline voptype_p
+add_vdef_op (tree stmt, tree op, int num, voptype_p last)
+{
+ voptype_p new = add_vop (stmt, op, num, last);
+ VDEF_RESULT (new) = op;
+ return new;
+}
+
+
+/* Reallocate the virtual operand PTR so that it has NUM_ELEM use slots. ROOT
+ is the head of the operand list it belongs to. */
-static v_may_def_optype
-finalize_ssa_v_may_defs (v_may_def_optype *old_ops_p, tree stmt)
+static inline struct voptype_d *
+realloc_vop (struct voptype_d *ptr, unsigned int num_elem,
+ struct voptype_d **root)
{
- unsigned num, x, i, old_num;
- v_may_def_optype v_may_def_ops, old_ops;
- tree result, var;
- bool build_diff;
+ unsigned int x, lim;
+ tree stmt, val;
+ struct voptype_d *ret, *tmp;
- num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
- if (num == 0)
- return NULL;
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) == num_elem)
+ return ptr;
- old_ops = *old_ops_p;
+ val = VUSE_OP (ptr, 0);
+ if (TREE_CODE (val) == SSA_NAME)
+ val = SSA_NAME_VAR (val);
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (stmt && old_ops && old_ops->num_v_may_defs == num)
+ stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
+
+ /* Delink all the existing uses. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
{
- old_num = num;
- build_diff = false;
- for (x = 0; x < num; x++)
- {
- var = old_ops->v_may_defs[x].def;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- if (var != VARRAY_TREE (build_v_may_defs, x))
- {
- build_diff = true;
- break;
- }
- }
+ use_operand_p use_p = VUSE_OP_PTR (ptr, x);
+ delink_imm_use (use_p);
}
- else
- old_num = (old_ops ? old_ops->num_v_may_defs : 0);
- if (!build_diff)
+ /* If we want less space, simply use this one, and shrink the size. */
+ if (VUSE_VECT_NUM_ELEM (ptr->usev) > num_elem)
{
- v_may_def_ops = old_ops;
- *old_ops_p = NULL;
- for (x = 0; x < num; x++)
- correct_use_link (&(v_may_def_ops->v_may_defs[x].imm_use), stmt);
+ VUSE_VECT_NUM_ELEM (ptr->usev) = num_elem;
+ return ptr;
}
- else
+
+ /* It is growing. Allocate a new one and replace the old one. */
+ ret = add_vuse_op (stmt, val, num_elem, ptr);
+
+ /* Clear PTR and add its memory to the free list. Restore the element
+ count after clearing so add_vop_to_freelist picks the right bucket. */
+ lim = VUSE_VECT_NUM_ELEM (ptr->usev);
+ memset (ptr, 0,
+ sizeof (struct voptype_d) + sizeof (vuse_element_t) * (lim - 1));
+ VUSE_VECT_NUM_ELEM (ptr->usev) = lim;
+ add_vop_to_freelist (ptr);
+
+ /* Now simply remove the old one. */
+ if (*root == ptr)
{
- v_may_def_ops = allocate_v_may_def_optype (num);
- for (x = 0; x < num; x++)
- {
- var = VARRAY_TREE (build_v_may_defs, x);
- /* Look for VAR in the old operands vector. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->v_may_defs[i].def;
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- initialize_v_may_def_operand (v_may_def_ops, x,
- old_ops->v_may_defs[i].def,
- old_ops->v_may_defs[i].use,
- stmt,
- &(old_ops->v_may_defs[i].imm_use));
- break;
- }
- }
- if (i == old_num)
- {
- initialize_v_may_def_operand (v_may_def_ops, x, var, var, stmt,
- NULL);
- }
- }
+ *root = ret;
+ return ret;
}
+ else
+ {
+ /* Find the node that points to PTR (the loop body is
+ intentionally empty), then splice in the new one. */
+ for (tmp = *root; tmp != NULL && tmp->next != ptr; tmp = tmp->next)
+ ;
+ if (tmp != NULL)
+ {
+ tmp->next = ret;
+ return ret;
+ }
+ }
+
+ /* The pointer passed in isn't in STMT's VDEF lists. */
+ gcc_unreachable ();
+}
+
+
+/* Reallocate the PTR vdef so that it has NUM_ELEM use slots. */
+
+struct voptype_d *
+realloc_vdef (struct voptype_d *ptr, unsigned int num_elem)
+{
+ tree val, stmt;
+ struct voptype_d *ret;
+
+ val = VDEF_RESULT (ptr);
+ stmt = USE_STMT (VDEF_OP_PTR (ptr, 0));
+ ret = realloc_vop (ptr, num_elem, &(VDEF_OPS (stmt)));
+ VDEF_RESULT (ret) = val;
+ return ret;
+}
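+
+/* For example (hypothetical caller, not part of this patch), a pass
+ that needs a second RHS operand on a statement's VDEF could do:
+
+ voptype_p vdef = realloc_vdef (VDEF_OPS (stmt), 2);
+
+ realloc_vop keeps the result spliced into STMT's VDEF list. */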
+
+
+/* Reallocate the PTR vuse so that it has NUM_ELEM use slots. */
- /* Empty the V_MAY_DEF build vector after VUSES have been processed. */
+struct voptype_d *
+realloc_vuse (struct voptype_d *ptr, unsigned int num_elem)
+{
+ tree stmt;
+ struct voptype_d *ret;
- return v_may_def_ops;
+ stmt = USE_STMT (VUSE_OP_PTR (ptr, 0));
+ ret = realloc_vop (ptr, num_elem, &(VUSE_OPS (stmt)));
+ return ret;
}
-/* Clear the in_list bits and empty the build array for v_may_defs. */
+/* Takes elements from build_defs and turns them into def operands of STMT.
+ TODO -- Make build_defs VEC of tree *. */
static inline void
-cleanup_v_may_defs (void)
+finalize_ssa_defs (tree stmt)
{
- unsigned x, num;
- num = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+ unsigned new_i;
+ struct def_optype_d new_list;
+ def_optype_p old_ops, last;
+ unsigned int num = VEC_length (tree, build_defs);
- for (x = 0; x < num; x++)
+ /* There should only be a single real definition per assignment. */
+ gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
+
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = DEF_OPS (stmt);
+
+ new_i = 0;
+
+ /* Check for the common case of 1 def that hasn't changed. */
+ if (old_ops && old_ops->next == NULL && num == 1
+ && (tree *) VEC_index (tree, build_defs, 0) == DEF_OP_PTR (old_ops))
+ return;
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- tree t = VARRAY_TREE (build_v_may_defs, x);
- var_ann_t ann = var_ann (t);
- ann->in_v_may_def_list = 0;
+ old_ops->next = gimple_ssa_operands (cfun)->free_defs;
+ gimple_ssa_operands (cfun)->free_defs = old_ops;
}
- VARRAY_POP_ALL (build_v_may_defs);
+
+ /* If there is anything remaining in the build_defs list, simply emit it. */
+ for ( ; new_i < num; new_i++)
+ last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
+
+ /* Now set the stmt's operands. */
+ DEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ def_optype_p ptr;
+ unsigned x = 0;
+ for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == num);
+ }
+#endif
}
-/* Return a new vuse operand vector, comparing to OLD_OPS_P. */
-static vuse_optype
-finalize_ssa_vuses (vuse_optype *old_ops_p, tree stmt)
+/* Takes elements from build_uses and turns them into use operands of STMT.
+ TODO -- Make build_uses VEC of tree *. */
+
+static inline void
+finalize_ssa_uses (tree stmt)
{
- unsigned num, x, i, num_v_may_defs, old_num;
- vuse_optype vuse_ops, old_ops;
- bool build_diff;
+ unsigned new_i;
+ struct use_optype_d new_list;
+ use_optype_p old_ops, ptr, last;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x;
+ unsigned num = VEC_length (tree, build_uses);
+
+ /* If the pointer to the operand is the statement itself, something is
+ wrong. It means that we are pointing to a local variable (the
+ initial call to update_stmt_operands does not pass a pointer to a
+ statement). */
+ for (x = 0; x < num; x++)
+ gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
+ }
+#endif
- num = VARRAY_ACTIVE_SIZE (build_vuses);
- if (num == 0)
+ new_list.next = NULL;
+ last = &new_list;
+
+ old_ops = USE_OPS (stmt);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- cleanup_v_may_defs ();
- return NULL;
+ for (ptr = old_ops; ptr; ptr = ptr->next)
+ delink_imm_use (USE_OP_PTR (ptr));
+ old_ops->next = gimple_ssa_operands (cfun)->free_uses;
+ gimple_ssa_operands (cfun)->free_uses = old_ops;
}
- /* Remove superfluous VUSE operands. If the statement already has a
- V_MAY_DEF operation for a variable 'a', then a VUSE for 'a' is not
- needed because V_MAY_DEFs imply a VUSE of the variable. For instance,
- suppose that variable 'a' is aliased:
+ /* Now create nodes for all the new nodes. */
+ for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ last = add_use_op (stmt,
+ (tree *) VEC_index (tree, build_uses, new_i),
+ last);
+
+ /* Now set the stmt's operands. */
+ USE_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
+
+ gcc_assert (x == VEC_length (tree, build_uses));
+ }
+#endif
+}
- # VUSE <a_2>
- # a_3 = V_MAY_DEF <a_2>
- a = a + 1;
- The VUSE <a_2> is superfluous because it is implied by the V_MAY_DEF
- operation. */
+/* Takes elements from BUILD_VDEFS and turns them into vdef operands of
+ STMT. FIXME, for now VDEF operators should have a single operand
+ in their RHS. */
- num_v_may_defs = VARRAY_ACTIVE_SIZE (build_v_may_defs);
+static inline void
+finalize_ssa_vdefs (tree stmt)
+{
+ unsigned new_i;
+ struct voptype_d new_list;
+ voptype_p old_ops, ptr, last;
+ stmt_ann_t ann = stmt_ann (stmt);
- if (num_v_may_defs > 0)
+ /* Set the symbols referenced by STMT. */
+ if (!bitmap_empty_p (build_stores))
{
- size_t i;
- tree vuse;
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- {
- vuse = VARRAY_TREE (build_vuses, i);
- if (TREE_CODE (vuse) != SSA_NAME)
- {
- var_ann_t ann = var_ann (vuse);
- ann->in_vuse_list = 0;
- if (ann->in_v_may_def_list)
- {
- /* If we found a useless VUSE operand, remove it from the
- operand array by replacing it with the last active element
- in the operand array (unless the useless VUSE was the
- last operand, in which case we simply remove it. */
- if (i != VARRAY_ACTIVE_SIZE (build_vuses) - 1)
- {
- VARRAY_TREE (build_vuses, i)
- = VARRAY_TREE (build_vuses,
- VARRAY_ACTIVE_SIZE (build_vuses) - 1);
- }
- VARRAY_POP (build_vuses);
-
- /* We want to rescan the element at this index, unless
- this was the last element, in which case the loop
- terminates. */
- i--;
- }
- }
- }
+ if (ann->operands.stores == NULL)
+ ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+
+ bitmap_copy (ann->operands.stores, build_stores);
}
else
- /* Clear out the in_list bits. */
- for (x = 0; x < num; x++)
- {
- tree t = VARRAY_TREE (build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
+ BITMAP_FREE (ann->operands.stores);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
+ new_list.next = NULL;
+ last = &new_list;
- num = VARRAY_ACTIVE_SIZE (build_vuses);
- /* We could have reduced the size to zero now, however. */
- if (num == 0)
+ old_ops = VDEF_OPS (stmt);
+ new_i = 0;
+ while (old_ops && new_i < VEC_length (tree, build_vdefs))
{
- cleanup_v_may_defs ();
- return NULL;
- }
+ tree op = VEC_index (tree, build_vdefs, new_i);
+ unsigned new_uid = get_name_decl (op);
+ unsigned old_uid = get_name_decl (VDEF_RESULT (old_ops));
- old_ops = *old_ops_p;
+ /* FIXME, for now each VDEF operator should have at most one
+ operand in its RHS. */
+ gcc_assert (VDEF_NUM (old_ops) == 1);
- /* Determine whether vuses is the same as the old vector. */
- build_diff = true;
- if (stmt && old_ops && old_ops->num_vuses == num)
- {
- old_num = num;
- build_diff = false;
- for (x = 0; x < num ; x++)
+ if (old_uid == new_uid)
{
- tree v;
- v = old_ops->vuses[x].use;
- if (TREE_CODE (v) == SSA_NAME)
- v = SSA_NAME_VAR (v);
- if (v != VARRAY_TREE (build_vuses, x))
- {
- build_diff = true;
- break;
- }
+ /* If the symbols are the same, reuse the existing operand. */
+ last->next = old_ops;
+ last = old_ops;
+ old_ops = old_ops->next;
+ last->next = NULL;
+ set_virtual_use_link (VDEF_OP_PTR (last, 0), stmt);
+ new_i++;
+ }
+ else if (old_uid < new_uid)
+ {
+ /* If old is less than new, old goes to the free list. */
+ voptype_p next;
+ delink_imm_use (VDEF_OP_PTR (old_ops, 0));
+ next = old_ops->next;
+ add_vop_to_freelist (old_ops);
+ old_ops = next;
+ }
+ else
+ {
+ /* This is a new operand. */
+ last = add_vdef_op (stmt, op, 1, last);
+ new_i++;
}
}
- else
- old_num = (old_ops ? old_ops->num_vuses : 0);
- if (!build_diff)
- {
- vuse_ops = old_ops;
- *old_ops_p = NULL;
- for (x = 0; x < num; x++)
- correct_use_link (&(vuse_ops->vuses[x].imm_use), stmt);
- }
- else
+ /* If there is anything remaining in BUILD_VDEFS, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
+ last = add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, last);
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- vuse_ops = allocate_vuse_optype (num);
- for (x = 0; x < num; x++)
+ for (ptr = old_ops; ptr; ptr = last)
{
- tree result, var = VARRAY_TREE (build_vuses, x);
- /* Look for VAR in the old vector, and use that SSA_NAME. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->vuses[i].use;
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- initialize_vuse_operand (vuse_ops, x, old_ops->vuses[i].use,
- stmt, &(old_ops->vuses[i].imm_use));
- break;
- }
- }
- if (i == old_num)
- initialize_vuse_operand (vuse_ops, x, var, stmt, NULL);
+ last = ptr->next;
+ delink_imm_use (VDEF_OP_PTR (ptr, 0));
+ add_vop_to_freelist (ptr);
}
}
- /* The v_may_def build vector wasn't freed because we needed it here.
- Free it now with the vuses build vector. */
- VARRAY_POP_ALL (build_vuses);
- cleanup_v_may_defs ();
+ /* Now set STMT's operands. */
+ VDEF_OPS (stmt) = new_list.next;
+
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x = 0;
+ for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
+ x++;
- return vuse_ops;
+ gcc_assert (x == VEC_length (tree, build_vdefs));
+ }
+#endif
}
-/* Return a new v_must_def operand vector for STMT, comparing to OLD_OPS_P. */
-static v_must_def_optype
-finalize_ssa_v_must_defs (v_must_def_optype *old_ops_p, tree stmt)
+/* Takes elements from BUILD_VUSES and turns them into VUSE operands of
+ STMT. */
+
+static inline void
+finalize_ssa_vuse_ops (tree stmt)
{
- unsigned num, x, i, old_num = 0;
- v_must_def_optype v_must_def_ops, old_ops;
- tree result, var;
- bool build_diff;
+ unsigned new_i, old_i;
+ voptype_p old_ops, last;
+ VEC(tree,heap) *new_ops;
+ stmt_ann_t ann;
- num = VARRAY_ACTIVE_SIZE (build_v_must_defs);
- if (num == 0)
- return NULL;
+ /* Set the symbols referenced by STMT. */
+ ann = stmt_ann (stmt);
+ if (!bitmap_empty_p (build_loads))
+ {
+ if (ann->operands.loads == NULL)
+ ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
- /* In the presence of subvars, there may be more than one V_MUST_DEF per
- statement (one for each subvar). It is a bit expensive to verify that
- all must-defs in a statement belong to subvars if there is more than one
- MUST-def, so we don't do it. Suffice to say, if you reach here without
- having subvars, and have num >1, you have hit a bug. */
-
+ bitmap_copy (ann->operands.loads, build_loads);
+ }
+ else
+ BITMAP_FREE (ann->operands.loads);
+
+ /* If aliases have not been computed, do not instantiate a virtual
+ operator on STMT. Initially, we only compute the SSA form on
+ GIMPLE registers. The virtual SSA form is only computed after
+ alias analysis, so virtual operators will remain unrenamed and
+ the verifier will complain. However, alias analysis needs to
+ access symbol load/store information, so we need to compute
+ those. */
+ if (!gimple_aliases_computed_p (cfun))
+ return;
- old_ops = *old_ops_p;
+ /* STMT should have at most one VUSE operator. */
+ old_ops = VUSE_OPS (stmt);
+ gcc_assert (old_ops == NULL || old_ops->next == NULL);
- /* Check if the old vector and the new array are the same. */
- build_diff = true;
- if (stmt && old_ops && old_ops->num_v_must_defs == num)
+ new_ops = NULL;
+ new_i = old_i = 0;
+ while (old_ops
+ && old_i < VUSE_NUM (old_ops)
+ && new_i < VEC_length (tree, build_vuses))
{
- old_num = num;
- build_diff = false;
- for (x = 0; x < num; x++)
+ tree new_op = VEC_index (tree, build_vuses, new_i);
+ tree old_op = VUSE_OP (old_ops, old_i);
+ unsigned new_uid = get_name_decl (new_op);
+ unsigned old_uid = get_name_decl (old_op);
+
+ if (old_uid == new_uid)
{
- tree var = old_ops->v_must_defs[x].def;
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- if (var != VARRAY_TREE (build_v_must_defs, x))
- {
- build_diff = true;
- break;
- }
+ /* If the symbols are the same, reuse the existing operand. */
+ VEC_safe_push (tree, heap, new_ops, old_op);
+ new_i++;
+ old_i++;
+ }
+ else if (old_uid < new_uid)
+ {
+ /* If OLD_UID is less than NEW_UID, the old operand has
+ disappeared, skip to the next old operand. */
+ old_i++;
+ }
+ else
+ {
+ /* This is a new operand. */
+ VEC_safe_push (tree, heap, new_ops, new_op);
+ new_i++;
}
}
- else
- old_num = (old_ops ? old_ops->num_v_must_defs : 0);
- if (!build_diff)
+ /* If there is anything remaining in the build_vuses list, simply emit it. */
+ for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
+ VEC_safe_push (tree, heap, new_ops, VEC_index (tree, build_vuses, new_i));
+
+ /* If there is anything in the old list, free it. */
+ if (old_ops)
{
- v_must_def_ops = old_ops;
- *old_ops_p = NULL;
- for (x = 0; x < num; x++)
- correct_use_link (&(v_must_def_ops->v_must_defs[x].imm_use), stmt);
+ for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
+ delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
+ add_vop_to_freelist (old_ops);
+ VUSE_OPS (stmt) = NULL;
}
- else
+
+ /* If there are any operands, instantiate a VUSE operator for STMT. */
+ if (new_ops)
{
- v_must_def_ops = allocate_v_must_def_optype (num);
- for (x = 0; x < num ; x++)
- {
- var = VARRAY_TREE (build_v_must_defs, x);
- /* Look for VAR in the original vector. */
- for (i = 0; i < old_num; i++)
- {
- result = old_ops->v_must_defs[i].def;
- if (TREE_CODE (result) == SSA_NAME)
- result = SSA_NAME_VAR (result);
- if (result == var)
- {
- initialize_v_must_def_operand (v_must_def_ops, x,
- old_ops->v_must_defs[i].def,
- old_ops->v_must_defs[i].use,
- stmt,
- &(old_ops->v_must_defs[i].imm_use));
- break;
- }
- }
- if (i == old_num)
- {
- initialize_v_must_def_operand (v_must_def_ops, x, var, var, stmt,
- NULL);
- }
- }
+ tree op;
+ unsigned i;
+
+ last = add_vuse_op (stmt, NULL, VEC_length (tree, new_ops), NULL);
+
+ for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
+ SET_USE (VUSE_OP_PTR (last, (int) i), op);
+
+ VUSE_OPS (stmt) = last;
+ VEC_free (tree, heap, new_ops);
}
- VARRAY_POP_ALL (build_v_must_defs);
- return v_must_def_ops;
+#ifdef ENABLE_CHECKING
+ {
+ unsigned x;
+
+ if (VUSE_OPS (stmt))
+ {
+ gcc_assert (VUSE_OPS (stmt)->next == NULL);
+ x = VUSE_NUM (VUSE_OPS (stmt));
+ }
+ else
+ x = 0;
+
+ gcc_assert (x == VEC_length (tree, build_vuses));
+ }
+#endif
}
+/* Finalize the VUSE operand vector for STMT. */
+
+static void
+finalize_ssa_vuses (tree stmt)
+{
+ unsigned num, num_vdefs;
+ unsigned vuse_index;
-/* Finalize all the build vectors, fill the new ones into INFO. */
+ /* Remove superfluous VUSE operands. If the statement already has a
+ VDEF operator for a variable 'a', then a VUSE for 'a' is not
+ needed because VDEFs imply a VUSE of the variable. For instance,
+ suppose that variable 'a' is pointed-to by p and q:
+
+ # VUSE <a_2>
+ # a_3 = VDEF <a_2>
+ *p = *q;
+
+ The VUSE <a_2> is superfluous because it is implied by the
+ VDEF operator. */
+ num = VEC_length (tree, build_vuses);
+ num_vdefs = VEC_length (tree, build_vdefs);
+
+ if (num > 0 && num_vdefs > 0)
+ for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
+ {
+ tree vuse;
+ vuse = VEC_index (tree, build_vuses, vuse_index);
+ if (TREE_CODE (vuse) != SSA_NAME)
+ {
+ var_ann_t ann = var_ann (vuse);
+ ann->in_vuse_list = 0;
+ if (ann->in_vdef_list)
+ {
+ VEC_ordered_remove (tree, build_vuses, vuse_index);
+ continue;
+ }
+ }
+ vuse_index++;
+ }
+
+ finalize_ssa_vuse_ops (stmt);
+}
+
+
+/* Clear the in_list bits and empty the build array for VDEFs and
+ VUSEs. */
+
+static inline void
+cleanup_build_arrays (void)
+{
+ unsigned i;
+ tree t;
+
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vdef_list = false;
+
+ for (i = 0; VEC_iterate (tree, build_vuses, i, t); i++)
+ if (TREE_CODE (t) != SSA_NAME)
+ var_ann (t)->in_vuse_list = false;
+
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
+ VEC_truncate (tree, build_defs, 0);
+ VEC_truncate (tree, build_uses, 0);
+ bitmap_clear (build_loads);
+ bitmap_clear (build_stores);
+}
+
+/* Finalize all the build vectors, fill the new ones into INFO. */
+
static inline void
-finalize_ssa_stmt_operands (tree stmt, stmt_operands_p old_ops,
- stmt_operands_p new_ops)
+finalize_ssa_stmt_operands (tree stmt)
{
- new_ops->def_ops = finalize_ssa_defs (&(old_ops->def_ops), stmt);
- new_ops->use_ops = finalize_ssa_uses (&(old_ops->use_ops), stmt);
- new_ops->v_must_def_ops
- = finalize_ssa_v_must_defs (&(old_ops->v_must_def_ops), stmt);
- new_ops->v_may_def_ops
- = finalize_ssa_v_may_defs (&(old_ops->v_may_def_ops), stmt);
- new_ops->vuse_ops = finalize_ssa_vuses (&(old_ops->vuse_ops), stmt);
+ finalize_ssa_defs (stmt);
+ finalize_ssa_uses (stmt);
+ finalize_ssa_vdefs (stmt);
+ finalize_ssa_vuses (stmt);
+ cleanup_build_arrays ();
}
static inline void
start_ssa_stmt_operands (void)
{
- gcc_assert (VARRAY_ACTIVE_SIZE (build_defs) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_uses) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_vuses) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_v_may_defs) == 0);
- gcc_assert (VARRAY_ACTIVE_SIZE (build_v_must_defs) == 0);
+ gcc_assert (VEC_length (tree, build_defs) == 0);
+ gcc_assert (VEC_length (tree, build_uses) == 0);
+ gcc_assert (VEC_length (tree, build_vuses) == 0);
+ gcc_assert (VEC_length (tree, build_vdefs) == 0);
+ gcc_assert (bitmap_empty_p (build_loads));
+ gcc_assert (bitmap_empty_p (build_stores));
}
static inline void
append_def (tree *def_p)
{
- VARRAY_PUSH_TREE_PTR (build_defs, def_p);
+ VEC_safe_push (tree, heap, build_defs, (tree) def_p);
}
static inline void
append_use (tree *use_p)
{
- VARRAY_PUSH_TREE_PTR (build_uses, use_p);
+ VEC_safe_push (tree, heap, build_uses, (tree) use_p);
}
-/* Add a new virtual may def for variable VAR to the build array. */
+/* Add VAR to the set of variables that require a VDEF operator. */
static inline void
-append_v_may_def (tree var)
+append_vdef (tree var)
{
- var_ann_t ann = get_var_ann (var);
+ tree sym;
- /* Don't allow duplicate entries. */
- if (ann->in_v_may_def_list)
- return;
- ann->in_v_may_def_list = 1;
+ if (TREE_CODE (var) != SSA_NAME)
+ {
+ tree mpt;
+ var_ann_t ann;
+
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
- VARRAY_PUSH_TREE (build_v_may_defs, var);
+ /* Don't allow duplicate entries. */
+ ann = get_var_ann (var);
+ if (ann->in_vdef_list)
+ return;
+
+ ann->in_vdef_list = true;
+ sym = var;
+ }
+ else
+ sym = SSA_NAME_VAR (var);
+
+ VEC_safe_push (tree, heap, build_vdefs, var);
+ bitmap_set_bit (build_stores, DECL_UID (sym));
}
-/* Add VAR to the list of virtual uses. */
+/* Add VAR to the set of variables that require a VUSE operator. */
static inline void
append_vuse (tree var)
{
+ tree sym;
- /* Don't allow duplicate entries. */
if (TREE_CODE (var) != SSA_NAME)
{
- var_ann_t ann = get_var_ann (var);
+ tree mpt;
+ var_ann_t ann;
- if (ann->in_vuse_list || ann->in_v_may_def_list)
- return;
- ann->in_vuse_list = 1;
+ /* If VAR belongs to a memory partition, use it instead of VAR. */
+ mpt = memory_partition (var);
+ if (mpt)
+ var = mpt;
+
+ /* Don't allow duplicate entries. */
+ ann = get_var_ann (var);
+ if (ann->in_vuse_list || ann->in_vdef_list)
+ return;
+
+ ann->in_vuse_list = true;
+ sym = var;
}
+ else
+ sym = SSA_NAME_VAR (var);
- VARRAY_PUSH_TREE (build_vuses, var);
+ VEC_safe_push (tree, heap, build_vuses, var);
+ bitmap_set_bit (build_loads, DECL_UID (sym));
}
-/* Add VAR to the list of virtual must definitions for INFO. */
+/* REF is a tree that contains the entire pointer dereference
+ expression, if available, or NULL otherwise. ALIAS is the variable
+ we are asking if REF can access. OFFSET and SIZE come from the
+ memory access expression that generated this virtual operand. */
-static inline void
-append_v_must_def (tree var)
+static bool
+access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size)
{
- unsigned i;
+ bool offsetgtz = offset > 0;
+ unsigned HOST_WIDE_INT uoffset = (unsigned HOST_WIDE_INT) offset;
+ tree base = ref ? get_base_address (ref) : NULL;
+
+ /* If ALIAS is .GLOBAL_VAR then the memory reference REF must be
+ using a call-clobbered memory tag. By definition, call-clobbered
+ memory tags can always touch .GLOBAL_VAR. */
+ if (alias == gimple_global_var (cfun))
+ return true;
+
+ /* If ALIAS is an SFT, it can't be touched if the offset
+ and size of the access do not overlap with the SFT offset and
+ size. This is only true if we are accessing through a pointer
+ to a type that is the same as SFT_PARENT_VAR. Otherwise, we may
+ be accessing through a pointer to some substruct of the
+ structure, and if we try to prune there, we will have the wrong
+ offset, and get the wrong answer.
+ i.e., we can't prune without more work if we have something like
+
+ struct gcc_target
+ {
+ struct asm_out
+ {
+ const char *byte_op;
+ struct asm_int_op
+ {
+ const char *hi;
+ } aligned_op;
+ } asm_out;
+ } targetm;
+
+ foo = &targetm.asm_out.aligned_op;
+ return foo->hi;
+
+ SFT.1, which represents hi, will have SFT_OFFSET=32 because in
+ terms of SFT_PARENT_VAR, that is where it is.
+ However, the access through the foo pointer will be at offset 0. */
+ if (size != -1
+ && TREE_CODE (alias) == STRUCT_FIELD_TAG
+ && base
+ && TREE_TYPE (base) == TREE_TYPE (SFT_PARENT_VAR (alias))
+ && !overlap_subvar (offset, size, alias, NULL))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
- /* Don't allow duplicate entries. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_must_defs); i++)
- if (var == VARRAY_TREE (build_v_must_defs, i))
- return;
+ /* With strict aliasing, it is impossible for a component access
+ through a pointer to touch a random variable, unless that
+ variable *is* a structure or a pointer.
+
+ That is, given p->c, and some random global variable b,
+ there is no legal way that p->c could be an access to b.
+
+ With strict aliasing off, we consider it legal to do something
+ like:
+
+ struct foos { int l; };
+ int foo;
+ static struct foos *getfoo(void);
+ int main (void)
+ {
+ struct foos *f = getfoo();
+ f->l = 1;
+ foo = 2;
+ if (f->l == 1)
+ abort();
+ exit(0);
+ }
+ static struct foos *getfoo(void)
+ { return (struct foos *)&foo; }
+
+ (taken from 20000623-1.c)
+
+ The docs also say/imply that access through union pointers
+ is legal (but *not* if you take the address of the union member,
+ i.e. the inverse), such that you can do
+
+ typedef union {
+ int d;
+ } U;
+
+ int rv;
+ void breakme()
+ {
+ U *rv0;
+ U *pretmp = (U*)&rv;
+ rv0 = pretmp;
+ rv0->d = 42;
+ }
+ To implement this, we just punt on accesses through union
+ pointers entirely.
+ */
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && (TREE_CODE (base) != INDIRECT_REF
+ || TREE_CODE (TREE_TYPE (base)) != UNION_TYPE)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (alias))
+ && TREE_CODE (TREE_TYPE (alias)) != COMPLEX_TYPE
+ && !var_ann (alias)->is_heapvar
+ /* When the struct has the may_alias attribute, its alias set is
+ zero, and we must not prune the access (we return true). */
+ && get_alias_set (base))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
+
+ /* If the offset of the access is greater than the size of one of
+ the possible aliases, it can't be touching that alias, because it
+ would be past the end of the structure. */
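+
+ /* A hypothetical illustration of this check: given
+
+ char c;
+ struct S { int a; int b; } *p;
+
+ an access to 'p->b' has a bit offset of 32, while DECL_SIZE of
+ 'c' is only 8, so 'c' is pruned from the alias set here even
+ when the type-based check above does not apply. */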
+ else if (ref
+ && flag_strict_aliasing
+ && TREE_CODE (ref) != INDIRECT_REF
+ && !MTAG_P (alias)
+ && !POINTER_TYPE_P (TREE_TYPE (alias))
+ && offsetgtz
+ && DECL_SIZE (alias)
+ && TREE_CODE (DECL_SIZE (alias)) == INTEGER_CST
+ && uoffset > TREE_INT_CST_LOW (DECL_SIZE (alias)))
+ {
+#ifdef ACCESS_DEBUGGING
+ fprintf (stderr, "Access to ");
+ print_generic_expr (stderr, ref, 0);
+ fprintf (stderr, " may not touch ");
+ print_generic_expr (stderr, alias, 0);
+ fprintf (stderr, " in function %s\n", get_name (current_function_decl));
+#endif
+ return false;
+ }
+
+ return true;
+}
+
+
+/* Add VAR to the virtual operands array. FLAGS is as in
+ get_expr_operands. FULL_REF is a tree that contains the entire
+ pointer dereference expression, if available, or NULL otherwise.
+ OFFSET and SIZE come from the memory access expression that
+ generated this virtual operand. IS_CALL_SITE is true if the
+ affected statement is a call site. */
+
+static void
+add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
+ tree full_ref, HOST_WIDE_INT offset,
+ HOST_WIDE_INT size, bool is_call_site)
+{
+ bitmap aliases = NULL;
+ tree sym;
+ var_ann_t v_ann;
+
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark the statement as having memory operands. */
+ s_ann->references_memory = true;
+
+ /* Mark statements with volatile operands. Optimizers should back
+ off from statements having volatile operands. */
+ if (TREE_THIS_VOLATILE (sym) && s_ann)
+ s_ann->has_volatile_ops = true;
+
+ /* If the variable cannot be modified and this is a VDEF, change
+ it into a VUSE. This happens when read-only variables are marked
+ call-clobbered and/or aliased to writable variables. We only
+ suppress the VDEF for non-specific stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
+ into validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
+ if ((flags & opf_implicit) && unmodifiable_var_p (var))
+ flags &= ~opf_def;
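+
+ /* For instance, the "memory" clobber of an asm statement adds an
+ implicit VDEF for every call-clobbered variable; if one of those
+ is a read-only variable (e.g. marked call-clobbered after its
+ const-ness was cast away), the VDEF is demoted to a VUSE here. */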
+
+ /* The variable is not a GIMPLE register. Add it (or its aliases) to
+ virtual operands, unless the caller has specifically requested
+ not to add virtual operands (used when adding operands inside an
+ ADDR_EXPR expression). */
+ if (flags & opf_no_vops)
+ return;
+
+ if (MTAG_P (var))
+ aliases = MTAG_ALIASES (var);
+
+ if (aliases == NULL)
+ {
+ if (s_ann && !gimple_aliases_computed_p (cfun))
+ s_ann->has_volatile_ops = true;
- VARRAY_PUSH_TREE (build_v_must_defs, var);
+ /* The variable is not aliased or it is an alias tag. */
+ if (flags & opf_def)
+ append_vdef (var);
+ else
+ append_vuse (var);
+ }
+ else
+ {
+ bitmap_iterator bi;
+ unsigned int i;
+ tree al;
+
+ /* The variable is aliased. Add its aliases to the virtual
+ operands. */
+ gcc_assert (!bitmap_empty_p (aliases));
+
+ if (flags & opf_def)
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vdef (al);
+ }
+
+ /* If the variable is also an alias tag, add a virtual
+ operand for it, otherwise we will miss representing
+ references to the members of the variable's alias set.
+ This fixes the bug in gcc.c-torture/execute/20020503-1.c.
+
+ It is also necessary to add bare defs on clobbers for
+ SMTs, so that bare SMT uses caused by pruning all the
+ aliases will link up properly with calls. To keep the
+ number of these bare defs to the minimum necessary, we
+ keep track of which SMTs were used alone in statement
+ vdefs or VUSEs. */
+ if (none_added
+ || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ && is_call_site))
+ {
+ append_vdef (var);
+ }
+ }
+ else
+ {
+ bool none_added = true;
+ EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
+ {
+ al = referenced_var (i);
+ if (!access_can_touch_variable (full_ref, al, offset, size))
+ continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
+ none_added = false;
+ append_vuse (al);
+ }
+
+ /* Even if no aliases have been added, we still need to
+ establish def-use and use-def chains, lest
+ transformations think that this is not a memory
+ reference. For an example of this scenario, see
+ testsuite/g++.dg/opt/cleanup1.C. */
+ if (none_added)
+ append_vuse (var);
+ }
+ }
}
-/* Parse STMT looking for operands. OLD_OPS is the original stmt operand
- cache for STMT, if it existed before. When finished, the various build_*
- operand vectors will have potential operands. in them. */
-
+/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
+ get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
+ the statement's real operands, otherwise it is added to virtual
+ operands. */
+
static void
-parse_ssa_operands (tree stmt)
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
{
- enum tree_code code;
+ tree var, sym;
+ var_ann_t v_ann;
- code = TREE_CODE (stmt);
- switch (code)
+ gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+
+ var = *var_p;
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark statements with volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
+ s_ann->has_volatile_ops = true;
+
+ if (is_gimple_reg (sym))
{
- case MODIFY_EXPR:
- /* First get operands from the RHS. For the LHS, we use a V_MAY_DEF if
- either only part of LHS is modified or if the RHS might throw,
- otherwise, use V_MUST_DEF.
+ /* The variable is a GIMPLE register. Add it to real operands. */
+ if (flags & opf_def)
+ append_def (var_p);
+ else
+ append_use (var_p);
+ }
+ else
+ add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
+}
- ??? If it might throw, we should represent somehow that it is killed
- on the fallthrough path. */
- {
- tree lhs = TREE_OPERAND (stmt, 0);
- int lhs_flags = opf_is_def;
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 1), opf_none);
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
- /* If the LHS is a VIEW_CONVERT_EXPR, it isn't changing whether
- or not the entire LHS is modified; that depends on what's
- inside the VIEW_CONVERT_EXPR. */
- if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
- lhs = TREE_OPERAND (lhs, 0);
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here.
+
+ FLAGS is as in get_expr_operands.
- if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
- && TREE_CODE (lhs) != BIT_FIELD_REF
- && TREE_CODE (lhs) != REALPART_EXPR
- && TREE_CODE (lhs) != IMAGPART_EXPR)
- lhs_flags |= opf_kill_def;
+ FULL_REF contains the full pointer dereference expression, if we
+ have it, or NULL otherwise.
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), lhs_flags);
- }
- break;
+ OFFSET and SIZE are the location of the access inside the
+ dereferenced pointer, if known.
- case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
- break;
+ RECURSE_ON_BASE should be set to true if we want to continue
+ calling get_expr_operands on the base pointer, and false if
+ something else will do it for us. */
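+
+ /* An illustrative case: for a reference such as '(*p_1).b', the
+ COMPONENT_REF case of get_expr_operands calls this function with
+ FULL_REF set to the whole '(*p_1).b', OFFSET and SIZE locating
+ 'b' inside '*p_1', and RECURSE_ON_BASE false, since the caller
+ rescans the base itself with virtual operands suppressed. */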
- case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
- break;
+static void
+get_indirect_ref_operands (tree stmt, tree expr, int flags,
+ tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ bool recurse_on_base)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
+ tree ptr = *pptr;
+ stmt_ann_t s_ann = stmt_ann (stmt);
- case ASM_EXPR:
- get_asm_expr_operands (stmt);
- break;
+ s_ann->references_memory = true;
+ if (s_ann && TREE_THIS_VOLATILE (expr))
+ s_ann->has_volatile_ops = true;
- case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
- break;
+ if (SSA_VAR_P (ptr))
+ {
+ struct ptr_info_def *pi = NULL;
- case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
- break;
+ /* If PTR has flow-sensitive points-to information, use it. */
+ if (TREE_CODE (ptr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+ && pi->name_mem_tag)
+ {
+ /* PTR has its own memory tag. Use it. */
+ add_virtual_operand (pi->name_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+ }
+ else
+ {
+ /* If PTR is not an SSA_NAME or it doesn't have a name
+ tag, use its symbol memory tag. */
+ var_ann_t v_ann;
- case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
- break;
+ /* If we are emitting debugging dumps, display a warning if
+ PTR is an SSA_NAME with no flow-sensitive alias
+ information. That means that we may need to compute
+ aliasing again. */
+ if (dump_file
+ && TREE_CODE (ptr) == SSA_NAME
+ && pi == NULL)
+ {
+ fprintf (dump_file,
+ "NOTE: no flow-sensitive alias info for ");
+ print_generic_expr (dump_file, ptr, dump_flags);
+ fprintf (dump_file, " in ");
+ print_generic_stmt (dump_file, stmt, dump_flags);
+ }
- /* These nodes contain no variable references. */
- case BIND_EXPR:
- case CASE_LABEL_EXPR:
- case TRY_CATCH_EXPR:
- case TRY_FINALLY_EXPR:
- case EH_FILTER_EXPR:
- case CATCH_EXPR:
- case RESX_EXPR:
- break;
+ if (TREE_CODE (ptr) == SSA_NAME)
+ ptr = SSA_NAME_VAR (ptr);
+ v_ann = var_ann (ptr);
- default:
- /* Notice that if get_expr_operands tries to use &STMT as the operand
- pointer (which may only happen for USE operands), we will fail in
- append_use. This default will handle statements like empty
- statements, or CALL_EXPRs that may appear on the RHS of a statement
- or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
- break;
+ if (v_ann->symbol_mem_tag)
+ add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+ full_ref, offset, size, false);
+
+ /* Aliasing information is missing; mark the statement as
+ volatile so we won't optimize it away too aggressively. */
+ else if (s_ann
+ && !gimple_aliases_computed_p (cfun)
+ && (flags & opf_def))
+ s_ann->has_volatile_ops = true;
+ }
}
+ else if (TREE_CODE (ptr) == INTEGER_CST)
+ {
+ /* If a constant is used as a pointer, we can't generate a real
+ operand for it but we mark the statement volatile to prevent
+ optimizations from messing things up. */
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+ else
+ {
+ /* Ok, this isn't even is_gimple_min_invariant. Something is broken. */
+ gcc_unreachable ();
+ }
+
+ /* If requested, add a USE operand for the base pointer. */
+ if (recurse_on_base)
+ get_expr_operands (stmt, pptr, opf_use);
}
-/* Create an operands cache for STMT, returning it in NEW_OPS. OLD_OPS are the
- original operands, and if ANN is non-null, appropriate stmt flags are set
- in the stmt's annotation. If ANN is NULL, this is not considered a "real"
- stmt, and none of the operands will be entered into their respective
- immediate uses tables. This is to allow stmts to be processed when they
- are not actually in the CFG.
- Note that some fields in old_ops may change to NULL, although none of the
- memory they originally pointed to will be destroyed. It is appropriate
- to call free_stmt_operands() on the value returned in old_ops.
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+ tree tag, ref;
+ HOST_WIDE_INT offset, size, maxsize;
+ subvar_t svars, sv;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+
+ /* This statement references memory. */
+ s_ann->references_memory = 1;
+
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
+
+ if (TMR_SYMBOL (expr))
+ add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+
+ tag = TMR_TAG (expr);
+ if (!tag)
+ {
+ /* Something weird happened; be conservative and mark the
+ statement volatile so optimizers will back off. */
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+
+ if (DECL_P (tag))
+ {
+ get_expr_operands (stmt, &tag, flags);
+ return;
+ }
+
+ ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
+ gcc_assert (ref != NULL_TREE);
+ svars = get_subvars_for_var (ref);
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bool exact;
+
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
+ add_stmt_operand (&sv->var, s_ann, flags);
+ }
+}
- The rationale for this: Certain optimizations wish to examine the difference
- between new_ops and old_ops after processing. If a set of operands don't
- change, new_ops will simply assume the pointer in old_ops, and the old_ops
- pointer will be set to NULL, indicating no memory needs to be cleared.
- Usage might appear something like:
- old_ops_copy = old_ops = stmt_ann(stmt)->operands;
- build_ssa_operands (stmt, NULL, &old_ops, &new_ops);
- <* compare old_ops_copy and new_ops *>
- free_ssa_operands (old_ops); */
+/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
+ clobbered variables in the function. */
static void
-build_ssa_operands (tree stmt, stmt_ann_t ann, stmt_operands_p old_ops,
- stmt_operands_p new_ops)
+add_call_clobber_ops (tree stmt, tree callee)
{
- tree_ann_t saved_ann = stmt->common.ann;
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b, not_written_b;
- /* Replace stmt's annotation with the one passed in for the duration
- of the operand building process. This allows "fake" stmts to be built
- and not be included in other data structures which can be built here. */
- stmt->common.ann = (tree_ann_t) ann;
+ /* Functions that are not const, pure or never return may clobber
+ call-clobbered variables. */
+ if (s_ann)
+ s_ann->makes_clobbering_call = true;
- parse_old_ops = old_ops;
-
- /* Initially assume that the statement has no volatile operands, nor
- makes aliased loads or stores. */
- if (ann)
+ /* If we created .GLOBAL_VAR earlier, just use it. */
+ if (gimple_global_var (cfun))
{
- ann->has_volatile_ops = false;
- ann->makes_aliased_stores = false;
- ann->makes_aliased_loads = false;
+ tree var = gimple_global_var (cfun);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+ return;
}
- start_ssa_stmt_operands ();
+ /* Get info for local and module level statics. There is a bit
+ set for each static if the call being processed does not read
+ or write that variable. */
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+ not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
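+
+ /* For example, if IPA reference analysis proved that CALLEE never
+ writes the file-local static 'g', the loop below adds only a VUSE
+ of 'g' (or no operand at all if 'g' is not read either) instead
+ of a VDEF. */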
- parse_ssa_operands (stmt);
+ /* Add a VDEF operand for every call clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var_lookup (u);
+ unsigned int escape_mask = var_ann (var)->escape_mask;
+ tree real_var = var;
+ bool not_read;
+ bool not_written;
+
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
+
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ not_written = not_written_b
+ ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ : false;
+ gcc_assert (!unmodifiable_var_p (var));
+
+ clobber_stats.clobbered_vars++;
- parse_old_ops = NULL;
+ /* See if this variable is really clobbered by this function. */
- if (ann)
- finalize_ssa_stmt_operands (stmt, old_ops, new_ops);
- else
- finalize_ssa_stmt_operands (NULL, old_ops, new_ops);
- stmt->common.ann = saved_ann;
+ /* Trivial case: things that escape only to pure/const calls are
+ never clobbered by a non-pure/const call, and are only read by
+ pure/const calls. */
+ if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+ {
+ tree call = get_call_expr_in (stmt);
+ if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ {
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ else
+ {
+ clobber_stats.unescapable_clobbers_avoided++;
+ continue;
+ }
+ }
+
+ if (not_written)
+ {
+ clobber_stats.static_write_clobbers_avoided++;
+ if (!not_read)
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ else
+ clobber_stats.static_read_clobbers_avoided++;
+ }
+ else
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+ }
}
-/* Free any operands vectors in OPS. */
+/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
+ function. */
-static void
-free_ssa_operands (stmt_operands_p ops)
+static void
+add_call_read_ops (tree stmt, tree callee)
+{
+ unsigned u;
+ bitmap_iterator bi;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+ bitmap not_read_b;
+
+ /* If the function is not pure, it may reference memory. Add
+ a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
+ for the heuristic used to decide whether to create .GLOBAL_VAR. */
+ if (gimple_global_var (cfun))
+ {
+ tree var = gimple_global_var (cfun);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ return;
+ }
+
+ not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
+
+ /* Add a VUSE for each call-clobbered variable. */
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
+ {
+ tree var = referenced_var (u);
+ tree real_var = var;
+ bool not_read;
+
+ clobber_stats.readonly_clobbers++;
+
+ /* Not read and not written are computed on regular vars, not
+ subvars, so look at the parent var if this is an SFT. */
+
+ if (TREE_CODE (var) == STRUCT_FIELD_TAG)
+ real_var = SFT_PARENT_VAR (var);
+
+ not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ if (not_read)
+ {
+ clobber_stats.static_readonly_clobbers_avoided++;
+ continue;
+ }
+
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+ }
+}
+
+
+/* A subroutine of get_expr_operands to handle CALL_EXPR. */
+
+static void
+get_call_expr_operands (tree stmt, tree expr)
{
- if (ops->def_ops)
- free_defs (&(ops->def_ops));
- if (ops->use_ops)
- free_uses (&(ops->use_ops));
- if (ops->vuse_ops)
- free_vuses (&(ops->vuse_ops));
- if (ops->v_may_def_ops)
- free_v_may_defs (&(ops->v_may_def_ops));
- if (ops->v_must_def_ops)
- free_v_must_defs (&(ops->v_must_def_ops));
+ int call_flags = call_expr_flags (expr);
+ int i, nargs;
+ stmt_ann_t ann = stmt_ann (stmt);
+
+ ann->references_memory = true;
+
+ /* If aliases have been computed already, add VDEF or VUSE
+ operands for all the symbols that have been found to be
+ call-clobbered. */
+ if (gimple_aliases_computed_p (cfun)
+ && !(call_flags & ECF_NOVOPS))
+ {
+ /* A 'pure' or a 'const' function never call-clobbers anything.
+ A 'noreturn' function might, but since we don't return anyway
+ there is no point in recording that. */
+ if (TREE_SIDE_EFFECTS (expr)
+ && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+ add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+ else if (!(call_flags & ECF_CONST))
+ add_call_read_ops (stmt, get_callee_fndecl (expr));
+ }
+
+ /* Find uses in the call expression itself: the callee address,
+ the arguments and the static chain. */
+ get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
+ nargs = call_expr_nargs (expr);
+ for (i = 0; i < nargs; i++)
+ get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
+
+ get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
}
-/* Swap operands EXP0 and EXP1 in STMT. */
+/* Scan the operands of the ASM_EXPR statement STMT. */
static void
-swap_tree_operands (tree *exp0, tree *exp1)
+get_asm_expr_operands (tree stmt)
{
- tree op0, op1;
- op0 = *exp0;
- op1 = *exp1;
+ stmt_ann_t s_ann;
+ int i, noutputs;
+ const char **oconstraints;
+ const char *constraint;
+ bool allows_mem, allows_reg, is_inout;
+ tree link;
+
+ s_ann = stmt_ann (stmt);
+ noutputs = list_length (ASM_OUTPUTS (stmt));
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- /* If the operand cache is active, attempt to preserve the relative positions
- of these two operands in their respective immediate use lists. */
- if (build_defs != NULL && op0 != op1 && parse_old_ops != NULL)
+ /* Gather all output operands. */
+ for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
{
- unsigned x, use0, use1;
- use_optype uses = parse_old_ops->use_ops;
- use0 = use1 = NUM_USES (uses);
- /* Find the 2 operands in the cache, if they are there. */
- for (x = 0; x < NUM_USES (uses); x++)
- if (USE_OP_PTR (uses, x)->use == exp0)
- {
- use0 = x;
- break;
- }
- for (x = 0; x < NUM_USES (uses); x++)
- if (USE_OP_PTR (uses, x)->use == exp1)
- {
- use1 = x;
- break;
- }
- /* If both uses don't have operand entries, there isn't much we can do
- at this point. Presumably we dont need to worry about it. */
- if (use0 != NUM_USES (uses) && use1 != NUM_USES (uses))
- {
- tree *tmp = USE_OP_PTR (uses, use1)->use;
- gcc_assert (use0 != use1);
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ oconstraints[i] = constraint;
+ parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+ &allows_reg, &is_inout);
+
+ /* This should have been split in gimplify_asm_expr. */
+ gcc_assert (!allows_reg || !is_inout);
- USE_OP_PTR (uses, use1)->use = USE_OP_PTR (uses, use0)->use;
- USE_OP_PTR (uses, use0)->use = tmp;
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
+ {
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
+
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
}
- /* Now swap the data. */
- *exp0 = op1;
- *exp1 = op0;
-}
+ /* Gather all input operands. */
+ for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+ {
+ constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
-/* Get the operands of statement STMT. */
+ /* Memory operands are addressable. Note that STMT needs the
+ address of this operand. */
+ if (!allows_reg && allows_mem)
+ {
+ tree t = get_base_address (TREE_VALUE (link));
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
+ }
-void
-update_stmt_operands (tree stmt)
-{
- stmt_ann_t ann;
- stmt_operands_t old_operands;
+ get_expr_operands (stmt, &TREE_VALUE (link), 0);
+ }
- /* Don't do anything if we are called before SSA is initialized. */
- if (build_defs == NULL)
- return;
- /* The optimizers cannot handle statements that are nothing but a
- _DECL. This indicates a bug in the gimplifier. */
- gcc_assert (!SSA_VAR_P (stmt));
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
+ for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
+ if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
+ {
+ unsigned i;
+ bitmap_iterator bi;
- ann = get_stmt_ann (stmt);
+ s_ann->references_memory = true;
- gcc_assert (ann->modified);
+ EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
+ }
- timevar_push (TV_TREE_OPS);
+ EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
+ {
+ tree var = referenced_var (i);
+
+ /* Subvars are explicitly represented in this list, so we
+ don't need the original to be added to the clobber ops,
+ but the original *will* be in this list because we keep
+ the addressability of the original variable up-to-date
+ to avoid confusing the back-end. */
+ if (var_can_have_subvars (var)
+ && get_subvars_for_var (var) != NULL)
+ continue;
+
+ add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
+ }
+ break;
+ }
+}
- old_operands = ann->operands;
- memset (&(ann->operands), 0, sizeof (stmt_operands_t));
- build_ssa_operands (stmt, ann, &old_operands, &(ann->operands));
- free_ssa_operands (&old_operands);
+/* Scan operands for the assignment expression EXPR in statement STMT. */
- /* Clear the modified bit for STMT. */
- ann->modified = 0;
+static void
+get_modify_stmt_operands (tree stmt, tree expr)
+{
+ /* First get operands from the RHS. */
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
- timevar_pop (TV_TREE_OPS);
+ /* For the LHS, use a regular definition (opf_def) for GIMPLE
+ registers. If the LHS is a store to memory, we will need
+ a preserving definition (VDEF).
+
+ Preserving definitions are those that modify a part of an
+ aggregate object for which no subvars have been computed (or the
+ reference does not correspond exactly to one of them). Stores
+ through a pointer are also represented with VDEF operators.
+
+ We used to distinguish between preserving and killing definitions.
+ We always emit preserving definitions now. */
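+
+ /* E.g., for a hypothetical 'x.f = tmp_1' where 'x' has no computed
+ subvars, the LHS produces a VDEF of 'x' that preserves the fields
+ of 'x' not overwritten by the store. */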
+ get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
}
-/* Recursively scan the expression pointed by EXPR_P in statement referred to
- by INFO. FLAGS is one of the OPF_* constants modifying how to interpret the
- operands found. */
+/* Recursively scan the expression pointed to by EXPR_P in statement
+ STMT. FLAGS is one of the OPF_* constants modifying how to
+ interpret the operands found. */
static void
get_expr_operands (tree stmt, tree *expr_p, int flags)
switch (code)
{
case ADDR_EXPR:
- /* We could have the address of a component, array member,
- etc which has interesting variable references. */
/* Taking the address of a variable does not represent a
- reference to it, but the fact that the stmt takes its address will be
- of interest to some passes (e.g. alias resolution). */
- add_stmt_operand (expr_p, s_ann, 0);
+ reference to it, but the fact that the statement takes its
+ address will be of interest to some passes (e.g. alias
+ resolution). */
+ add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
- /* If the address is invariant, there may be no interesting variable
- references inside. */
+ /* If the address is invariant, there may be no interesting
+ variable references inside. */
if (is_gimple_min_invariant (expr))
return;
- /* There should be no VUSEs created, since the referenced objects are
- not really accessed. The only operands that we should find here
- are ARRAY_REF indices which will always be real operands (GIMPLE
- does not allow non-registers as array indices). */
+ /* Otherwise, there may be variables referenced inside but there
+ should be no VUSEs created, since the referenced objects are
+ not really accessed. The only operands that we should find
+ here are ARRAY_REF indices which will always be real operands
+ (GIMPLE does not allow non-registers as array indices). */
flags |= opf_no_vops;
-
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case SSA_NAME:
+ case STRUCT_FIELD_TAG:
+ case SYMBOL_MEMORY_TAG:
+ case NAME_MEMORY_TAG:
+ add_stmt_operand (expr_p, s_ann, flags);
+ return;
+
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- case CONST_DECL:
{
subvar_t svars;
- /* Add the subvars for a variable if it has subvars, to DEFS or USES.
- Otherwise, add the variable itself.
- Whether it goes to USES or DEFS depends on the operand flags. */
+ /* Add the subvars for a variable, if it has subvars, to DEFS
+ or USES. Otherwise, add the variable itself. Whether it
+ goes to USES or DEFS depends on the operand flags. */
if (var_can_have_subvars (expr)
&& (svars = get_subvars_for_var (expr)))
{
add_stmt_operand (&sv->var, s_ann, flags);
}
else
- {
- add_stmt_operand (expr_p, s_ann, flags);
- }
+ add_stmt_operand (expr_p, s_ann, flags);
+
return;
}
+
case MISALIGNED_INDIRECT_REF:
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
/* fall through */
case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
- get_indirect_ref_operands (stmt, expr, flags);
+ get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
return;
- case ARRAY_REF:
- case ARRAY_RANGE_REF:
- /* Treat array references as references to the virtual variable
- representing the array. The virtual variable for an ARRAY_REF
- is the VAR_DECL for the array. */
-
- /* Add the virtual variable for the ARRAY_REF to VDEFS or VUSES
- according to the value of IS_DEF. Recurse if the LHS of the
- ARRAY_REF node is not a regular variable. */
- if (SSA_VAR_P (TREE_OPERAND (expr, 0)))
- add_stmt_operand (expr_p, s_ann, flags);
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
+ case TARGET_MEM_REF:
+ get_tmr_operands (stmt, expr, flags);
return;
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
{
tree ref;
- HOST_WIDE_INT offset, size;
- /* This component ref becomes an access to all of the subvariables
- it can touch, if we can determine that, but *NOT* the real one.
- If we can't determine which fields we could touch, the recursion
- will eventually get to a variable and add *all* of its subvars, or
- whatever is the minimum correct subset. */
-
- ref = okay_component_ref_for_subvars (expr, &offset, &size);
- if (ref)
- {
- subvar_t svars = get_subvars_for_var (ref);
+ HOST_WIDE_INT offset, size, maxsize;
+ bool none = true;
+
+ /* This component reference becomes an access to all of the
+ subvariables it can touch, if we can determine that, but
+ *NOT* the real one. If we can't determine which fields we
+ could touch, the recursion will eventually get to a
+ variable and add *all* of its subvars, or whatever is the
+ minimum correct subset. */
+ ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
+ if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
+ {
subvar_t sv;
+ subvar_t svars = get_subvars_for_var (ref);
+
for (sv = svars; sv; sv = sv->next)
{
bool exact;
- if (overlap_subvar (offset, size, sv, &exact))
+
+ if (overlap_subvar (offset, maxsize, sv->var, &exact))
{
- if (exact)
- flags &= ~opf_kill_def;
- add_stmt_operand (&sv->var, s_ann, flags);
+ int subvar_flags = flags;
+ none = false;
+ add_stmt_operand (&sv->var, s_ann, subvar_flags);
}
}
+
+ if (!none)
+ flags |= opf_no_vops;
}
- else
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
- flags & ~opf_kill_def);
+ else if (TREE_CODE (ref) == INDIRECT_REF)
+ {
+ get_indirect_ref_operands (stmt, ref, flags, expr, offset,
+ maxsize, false);
+ flags |= opf_no_vops;
+ }
+
+ /* Even if we found subvars above, we still need to scan the base
+ expression so that immediate uses are seen, e.g. for 'd' in
+ 's.a[d]' when 's.a' has a subvar; we would miss 'd' otherwise. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
if (code == COMPONENT_REF)
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ {
+ if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ s_ann->has_volatile_ops = true;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ }
+ else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
+ }
+
return;
}
+
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case COND_EXPR:
case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
return;
- case MODIFY_EXPR:
- {
- int subflags;
- tree op;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
-
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == WITH_SIZE_EXPR)
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) == ARRAY_REF
- || TREE_CODE (op) == ARRAY_RANGE_REF
- || TREE_CODE (op) == REALPART_EXPR
- || TREE_CODE (op) == IMAGPART_EXPR)
- subflags = opf_is_def;
- else
- subflags = opf_is_def | opf_kill_def;
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), subflags);
- return;
- }
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, expr);
+ return;
case CONSTRUCTOR:
{
/* General aggregate CONSTRUCTORs have been decomposed, but they
are still in use as the COMPLEX_EXPR equivalent for vectors. */
+ constructor_elt *ce;
+ unsigned HOST_WIDE_INT idx;
- tree t;
- for (t = TREE_OPERAND (expr, 0); t ; t = TREE_CHAIN (t))
- get_expr_operands (stmt, &TREE_VALUE (t), opf_none);
+ for (idx = 0;
+ VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
+ idx++)
+ get_expr_operands (stmt, &ce->value, opf_use);
return;
}
- case TRUTH_NOT_EXPR:
case BIT_FIELD_REF:
+ case TRUTH_NOT_EXPR:
case VIEW_CONVERT_EXPR:
do_unary:
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
case ASSERT_EXPR:
do_binary:
{
- tree op0 = TREE_OPERAND (expr, 0);
- tree op1 = TREE_OPERAND (expr, 1);
-
- /* If it would be profitable to swap the operands, then do so to
- canonicalize the statement, enabling better optimization.
-
- By placing canonicalization of such expressions here we
- transparently keep statements in canonical form, even
- when the statement is modified. */
- if (tree_swap_operands_p (op0, op1, false))
- {
- /* For relationals we need to swap the operands
- and change the code. */
- if (code == LT_EXPR
- || code == GT_EXPR
- || code == LE_EXPR
- || code == GE_EXPR)
- {
- TREE_SET_CODE (expr, swap_tree_comparison (code));
- swap_tree_operands (&TREE_OPERAND (expr, 0),
- &TREE_OPERAND (expr, 1));
- }
-
- /* For a commutative operator we can just swap the operands. */
- else if (commutative_tree_code (code))
- {
- swap_tree_operands (&TREE_OPERAND (expr, 0),
- &TREE_OPERAND (expr, 1));
- }
- }
-
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
return;
}
+ case DOT_PROD_EXPR:
case REALIGN_LOAD_EXPR:
{
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
case EXC_PTR_EXPR:
case FILTER_EXPR:
case LABEL_DECL:
+ case CONST_DECL:
+ case OMP_PARALLEL:
+ case OMP_SECTIONS:
+ case OMP_FOR:
+ case OMP_SINGLE:
+ case OMP_MASTER:
+ case OMP_ORDERED:
+ case OMP_CRITICAL:
+ case OMP_RETURN:
+ case OMP_CONTINUE:
/* Expressions that make no memory references. */
return;
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
- internal_error ("internal error");
#endif
gcc_unreachable ();
}
-/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
-
-static void
-get_asm_expr_operands (tree stmt)
-{
- stmt_ann_t s_ann = stmt_ann (stmt);
- int noutputs = list_length (ASM_OUTPUTS (stmt));
- const char **oconstraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i;
- tree link;
- const char *constraint;
- bool allows_mem, allows_reg, is_inout;
-
- for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
- {
- oconstraints[i] = constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_output_constraint (&constraint, i, 0, 0,
- &allows_mem, &allows_reg, &is_inout);
-
- /* This should have been split in gimplify_asm_expr. */
- gcc_assert (!allows_reg || !is_inout);
-
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
- }
-
- get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
- }
-
- for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
- {
- constraint
- = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
-
- /* Memory operands are addressable. Note that STMT needs the
- address of this operand. */
- if (!allows_reg && allows_mem)
- {
- tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
- }
-
- get_expr_operands (stmt, &TREE_VALUE (link), 0);
- }
-
-
- /* Clobber memory for asm ("" : : : "memory"); */
- for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
- if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
- {
- unsigned i;
- bitmap_iterator bi;
-
- /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
- decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def);
- }
-
- /* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so
- we don't need the original to be added to the clobber
- ops, but the original *will* be in this list because
- we keep the addressability of the original
- variable up-to-date so we don't screw up the rest of
- the backend. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
- add_stmt_operand (&var, s_ann, opf_is_def);
- }
-
- break;
- }
-}
-
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF. */
+/* Parse STMT looking for operands. When finished, the various
+ build_* operand vectors will have potential operands in them. */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags)
+parse_ssa_operands (tree stmt)
{
- tree *pptr = &TREE_OPERAND (expr, 0);
- tree ptr = *pptr;
- stmt_ann_t s_ann = stmt_ann (stmt);
-
- /* Stores into INDIRECT_REF operands are never killing definitions. */
- flags &= ~opf_kill_def;
+ enum tree_code code;
- if (SSA_VAR_P (ptr))
+ code = TREE_CODE (stmt);
+ switch (code)
{
- struct ptr_info_def *pi = NULL;
-
- /* If PTR has flow-sensitive points-to information, use it. */
- if (TREE_CODE (ptr) == SSA_NAME
- && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
- && pi->name_mem_tag)
- {
- /* PTR has its own memory tag. Use it. */
- add_stmt_operand (&pi->name_mem_tag, s_ann, flags);
- }
- else
- {
- /* If PTR is not an SSA_NAME or it doesn't have a name
- tag, use its type memory tag. */
- var_ann_t v_ann;
-
- /* If we are emitting debugging dumps, display a warning if
- PTR is an SSA_NAME with no flow-sensitive alias
- information. That means that we may need to compute
- aliasing again. */
- if (dump_file
- && TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
- {
- fprintf (dump_file,
- "NOTE: no flow-sensitive alias info for ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
- }
-
- if (TREE_CODE (ptr) == SSA_NAME)
- ptr = SSA_NAME_VAR (ptr);
- v_ann = var_ann (ptr);
- if (v_ann->type_mem_tag)
- add_stmt_operand (&v_ann->type_mem_tag, s_ann, flags);
- }
- }
+ case GIMPLE_MODIFY_STMT:
+ get_modify_stmt_operands (stmt, stmt);
+ break;
- /* If a constant is used as a pointer, we can't generate a real
- operand for it but we mark the statement volatile to prevent
- optimizations from messing things up. */
- else if (TREE_CODE (ptr) == INTEGER_CST)
- {
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
- }
+ case COND_EXPR:
+ get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
+ break;
- /* Everything else *should* have been folded elsewhere, but users
- are smarter than we in finding ways to write invalid code. We
- cannot just assert here. If we were absolutely certain that we
- do handle all valid cases, then we could just do nothing here.
- That seems optimistic, so attempt to do something logical... */
- else if ((TREE_CODE (ptr) == PLUS_EXPR || TREE_CODE (ptr) == MINUS_EXPR)
- && TREE_CODE (TREE_OPERAND (ptr, 0)) == ADDR_EXPR
- && TREE_CODE (TREE_OPERAND (ptr, 1)) == INTEGER_CST)
- {
- /* Make sure we know the object is addressable. */
- pptr = &TREE_OPERAND (ptr, 0);
- add_stmt_operand (pptr, s_ann, 0);
+ case SWITCH_EXPR:
+ get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
+ break;
- /* Mark the object itself with a VUSE. */
- pptr = &TREE_OPERAND (*pptr, 0);
- get_expr_operands (stmt, pptr, flags);
- return;
- }
+ case ASM_EXPR:
+ get_asm_expr_operands (stmt);
+ break;
- /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
- else
- gcc_unreachable ();
+ case RETURN_EXPR:
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
+ break;
- /* Add a USE operand for the base pointer. */
- get_expr_operands (stmt, pptr, opf_none);
-}
+ case GOTO_EXPR:
+ get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
+ break;
-/* A subroutine of get_expr_operands to handle CALL_EXPR. */
+ case LABEL_EXPR:
+ get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
+ break;
-static void
-get_call_expr_operands (tree stmt, tree expr)
-{
- tree op;
- int call_flags = call_expr_flags (expr);
+ case BIND_EXPR:
+ case CASE_LABEL_EXPR:
+ case TRY_CATCH_EXPR:
+ case TRY_FINALLY_EXPR:
+ case EH_FILTER_EXPR:
+ case CATCH_EXPR:
+ case RESX_EXPR:
+ /* These nodes contain no variable references. */
+ break;
- /* If aliases have been computed already, add V_MAY_DEF or V_USE
- operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
- if (aliases_computed_p
- && !bitmap_empty_p (call_clobbered_vars)
- && !(call_flags & ECF_NOVOPS))
- {
- /* A 'pure' or a 'const' function never call-clobbers anything.
- A 'noreturn' function might, but since we don't return anyway
- there is no point in recording that. */
- if (TREE_SIDE_EFFECTS (expr)
- && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt);
- else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt);
+ default:
+ /* Notice that if get_expr_operands tries to use &STMT as the
+ operand pointer (which may only happen for USE operands), we
+ will fail in add_stmt_operand. This default will handle
+ statements like empty statements, or CALL_EXPRs that may
+ appear on the RHS of a statement or as statements themselves. */
+ get_expr_operands (stmt, &stmt, opf_use);
+ break;
}
-
- /* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
-
- for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
-
}
-/* Add *VAR_P to the appropriate operand array for INFO. FLAGS is as in
- get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
- the statement's real operands, otherwise it is added to virtual
- operands. */
+/* Create an operands cache for STMT. */
static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+build_ssa_operands (tree stmt)
{
- bool is_real_op;
- tree var, sym;
- var_ann_t v_ann;
-
- var = *var_p;
- STRIP_NOPS (var);
-
- /* If the operand is an ADDR_EXPR, add its operand to the list of
- variables that have had their address taken in this statement. */
- if (TREE_CODE (var) == ADDR_EXPR)
- {
- note_addressable (TREE_OPERAND (var, 0), s_ann);
- return;
- }
-
- /* If the original variable is not a scalar, it will be added to the list
- of virtual operands. In that case, use its base symbol as the virtual
- variable representing it. */
- is_real_op = is_gimple_reg (var);
- if (!is_real_op && !DECL_P (var))
- var = get_virtual_var (var);
-
- /* If VAR is not a variable that we care to optimize, do nothing. */
- if (var == NULL_TREE || !SSA_VAR_P (var))
- return;
-
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
-
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
-
- /* If the variable cannot be modified and this is a V_MAY_DEF change
- it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writeable variables. So we only
- check that this only happens on stores, and not writes to GIMPLE
- registers.
-
- FIXME: The C++ FE is emitting assignments in the IL stream for
- read-only globals. This is wrong, but for the time being disable
- this transformation on V_MUST_DEF operands (otherwise, we
- mis-optimize SPEC2000's eon). */
- if ((flags & opf_is_def)
- && !(flags & opf_kill_def)
- && unmodifiable_var_p (var))
- {
- gcc_assert (!is_real_op);
- flags &= ~opf_is_def;
- }
-
- if (is_real_op)
- {
- /* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
- append_def (var_p);
- else
- append_use (var_p);
- }
- else
- {
- varray_type aliases;
-
- /* The variable is not a GIMPLE register. Add it (or its aliases) to
- virtual operands, unless the caller has specifically requested
- not to add virtual operands (used when adding operands inside an
- ADDR_EXPR expression). */
- if (flags & opf_no_vops)
- return;
-
- aliases = v_ann->may_aliases;
-
- if (aliases == NULL)
- {
- /* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- {
- if (flags & opf_kill_def)
- {
- /* Only regular variables or struct fields may get a
- V_MUST_DEF operand. */
- gcc_assert (v_ann->mem_tag_kind == NOT_A_TAG
- || v_ann->mem_tag_kind == STRUCT_FIELD);
- /* V_MUST_DEF for non-aliased, non-GIMPLE register
- variable definitions. */
- append_v_must_def (var);
- }
- else
- {
- /* Add a V_MAY_DEF for call-clobbered variables and
- memory tags. */
- append_v_may_def (var);
- }
- }
- else
- {
- append_vuse (var);
- if (s_ann && v_ann->is_alias_tag)
- s_ann->makes_aliased_loads = 1;
- }
- }
- else
- {
- size_t i;
+ stmt_ann_t ann = get_stmt_ann (stmt);
+
+ /* Initially assume that the statement has no volatile operands and
+ makes no memory references. */
+ ann->has_volatile_ops = false;
+ ann->references_memory = false;
+ /* Just clear the bitmap so we don't end up reallocating it over and over. */
+ if (ann->addresses_taken)
+ bitmap_clear (ann->addresses_taken);
- /* The variable is aliased. Add its aliases to the virtual
- operands. */
- gcc_assert (VARRAY_ACTIVE_SIZE (aliases) != 0);
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (stmt);
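+
+ /* Sort the virtual operand vectors into canonical order so that
+ finalization can match them against the statement's previous
+ operand cache. */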
+ operand_build_sort_virtual (build_vuses);
+ operand_build_sort_virtual (build_vdefs);
+ finalize_ssa_stmt_operands (stmt);
+
+ if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
+ ann->addresses_taken = NULL;
+ /* For added safety, assume that statements with volatile operands
+ also reference memory. */
+ if (ann->has_volatile_ops)
+ ann->references_memory = true;
+}
- if (flags & opf_is_def)
- {
- bool added_may_defs_p = false;
-
- /* If the variable is also an alias tag, add a virtual
- operand for it, otherwise we will miss representing
- references to the members of the variable's alias set.
- This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
- if (v_ann->is_alias_tag)
- {
- added_may_defs_p = true;
- append_v_may_def (var);
- }
-
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- {
- /* While VAR may be modifiable, some of its aliases
- may not be. If that's the case, we don't really
- need to add them a V_MAY_DEF for them. */
- tree alias = VARRAY_TREE (aliases, i);
-
- if (unmodifiable_var_p (alias))
- append_vuse (alias);
- else
- {
- append_v_may_def (alias);
- added_may_defs_p = true;
- }
- }
-
- if (s_ann && added_may_defs_p)
- s_ann->makes_aliased_stores = 1;
- }
- else
- {
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_alias_tag)
- append_vuse (var);
- for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- append_vuse (VARRAY_TREE (aliases, i));
+/* Free any operands vectors in OPS. */
- if (s_ann)
- s_ann->makes_aliased_loads = 1;
- }
- }
- }
+void
+free_ssa_operands (stmt_operands_p ops)
+{
+ ops->def_ops = NULL;
+ ops->use_ops = NULL;
+ ops->vdef_ops = NULL;
+ ops->vuse_ops = NULL;
+ BITMAP_FREE (ops->loads);
+ BITMAP_FREE (ops->stores);
}
-
-/* Record that VAR had its address taken in the statement with annotations
- S_ANN. */
-static void
-note_addressable (tree var, stmt_ann_t s_ann)
+/* Get the operands of statement STMT. */
+
+void
+update_stmt_operands (tree stmt)
{
- tree ref;
- subvar_t svars;
- HOST_WIDE_INT offset;
- HOST_WIDE_INT size;
+ stmt_ann_t ann = get_stmt_ann (stmt);
- if (!s_ann)
+ /* If update_stmt_operands is called before SSA is initialized, do
+ nothing. */
+ if (!ssa_operands_active ())
return;
-
- /* If this is a COMPONENT_REF, and we know exactly what it touches, we only
- take the address of the subvariables it will touch.
- Otherwise, we take the address of all the subvariables, plus the real
- ones. */
- if (var && TREE_CODE (var) == COMPONENT_REF
- && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
- {
- subvar_t sv;
- svars = get_subvars_for_var (ref);
-
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
-
- for (sv = svars; sv; sv = sv->next)
- {
- if (overlap_subvar (offset, size, sv, NULL))
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
- }
- return;
- }
-
- var = get_base_address (var);
- if (var && SSA_VAR_P (var))
- {
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
-
+ /* The optimizers cannot handle statements that are nothing but a
+ _DECL. This indicates a bug in the gimplifier. */
+ gcc_assert (!SSA_VAR_P (stmt));
- if (var_can_have_subvars (var)
- && (svars = get_subvars_for_var (var)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
- }
- else
- bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
- }
+ timevar_push (TV_TREE_OPS);
+
+ gcc_assert (ann->modified);
+ build_ssa_operands (stmt);
+ ann->modified = 0;
+
+ timevar_pop (TV_TREE_OPS);
}
-/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
- clobbered variables in the function. */
-static void
-add_call_clobber_ops (tree stmt)
+/* Copies virtual operands from SRC to DEST. */
+
+void
+copy_virtual_operands (tree dest, tree src)
{
- unsigned i;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
+ unsigned int i, n;
+ voptype_p src_vuses, dest_vuses;
+ voptype_p src_vdefs, dest_vdefs;
+ struct voptype_d vuse;
+ struct voptype_d vdef;
+ stmt_ann_t dest_ann;
- /* Functions that are not const, pure or never return may clobber
- call-clobbered variables. */
- if (s_ann)
- s_ann->makes_clobbering_call = true;
+ VDEF_OPS (dest) = NULL;
+ VUSE_OPS (dest) = NULL;
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- return;
- }
+ dest_ann = get_stmt_ann (dest);
+ BITMAP_FREE (dest_ann->operands.loads);
+ BITMAP_FREE (dest_ann->operands.stores);
- /* If cache is valid, copy the elements into the build vectors. */
- if (ssa_call_clobbered_cache_valid)
+ if (LOADED_SYMS (src))
{
- for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_vuses); i++)
- {
- t = VARRAY_TREE (clobbered_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- VARRAY_PUSH_TREE (build_vuses, t);
- }
- for (i = 0; i < VARRAY_ACTIVE_SIZE (clobbered_v_may_defs); i++)
- {
- t = VARRAY_TREE (clobbered_v_may_defs, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_v_may_def_list = 1;
- VARRAY_PUSH_TREE (build_v_may_defs, t);
- }
- if (s_ann)
- {
- s_ann->makes_aliased_loads = clobbered_aliased_loads;
- s_ann->makes_aliased_stores = clobbered_aliased_stores;
- }
- return;
+ dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
+ bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
}
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
-
- /* Add a V_MAY_DEF operand for every call clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ if (STORED_SYMS (src))
{
- tree var = referenced_var (i);
- if (unmodifiable_var_p (var))
- add_stmt_operand (&var, &empty_ann, opf_none);
- else
- add_stmt_operand (&var, &empty_ann, opf_is_def);
+ dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
+ bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
}
- clobbered_aliased_loads = empty_ann.makes_aliased_loads;
- clobbered_aliased_stores = empty_ann.makes_aliased_stores;
-
- /* Set the flags for a stmt's annotation. */
- if (s_ann)
+ /* Copy all the VUSE operators and corresponding operands. */
+ dest_vuses = &vuse;
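+
+ /* 'vuse' is a dummy list head on the stack: each add_vuse_op call
+ links a new operator after DEST_VUSES, and the real head of the
+ copied list is read back from vuse.next below. */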
+ for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
{
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
- s_ann->makes_aliased_stores = empty_ann.makes_aliased_stores;
- }
+ n = VUSE_NUM (src_vuses);
+ dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
- /* Prepare empty cache vectors. */
- if (clobbered_v_may_defs)
- {
- VARRAY_POP_ALL (clobbered_vuses);
- VARRAY_POP_ALL (clobbered_v_may_defs);
- }
- else
- {
- VARRAY_TREE_INIT (clobbered_v_may_defs, 10, "clobbered_v_may_defs");
- VARRAY_TREE_INIT (clobbered_vuses, 10, "clobbered_vuses");
+ if (VUSE_OPS (dest) == NULL)
+ VUSE_OPS (dest) = vuse.next;
}
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- VARRAY_PUSH_TREE (clobbered_vuses, VARRAY_TREE (build_vuses, i));
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_v_may_defs); i++)
- VARRAY_PUSH_TREE (clobbered_v_may_defs, VARRAY_TREE (build_v_may_defs, i));
+ /* Copy all the VDEF operators and corresponding operands. */
+ dest_vdefs = &vdef;
+ for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
+ {
+ n = VUSE_NUM (src_vdefs);
+ dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
+ VDEF_RESULT (dest_vdefs) = VDEF_RESULT (src_vdefs);
+ for (i = 0; i < n; i++)
+ SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
- ssa_call_clobbered_cache_valid = true;
+ if (VDEF_OPS (dest) == NULL)
+ VDEF_OPS (dest) = vdef.next;
+ }
}
-/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
- function. */
+/* Specifically for use in DOM's expression analysis. Given a store, we
+ create an artificial stmt which looks like a load from the store; this
+ can be used to eliminate redundant loads. OLD_STMT is the store stmt,
+ and NEW_STMT is the new load which represents a load of the values
+ stored. */
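+
+/* For example (illustrative): if OLD_STMT is 'a = b_1' with virtual
+ operand '# a_4 = VDEF <a_3>', the fake load built here gets
+ 'VUSE <a_4>', so DOM can record that a later load of 'a' yields
+ the value stored by OLD_STMT. */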
-static void
-add_call_read_ops (tree stmt)
+void
+create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt)
{
+ tree op;
+ ssa_op_iter iter;
+ use_operand_p use_p;
unsigned i;
- tree t;
- bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
- struct stmt_ann_d empty_ann;
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_none);
- return;
- }
-
- /* If cache is valid, copy the elements into the build vector. */
- if (ssa_ro_call_cache_valid)
- {
- for (i = 0; i < VARRAY_ACTIVE_SIZE (ro_call_vuses); i++)
- {
- t = VARRAY_TREE (ro_call_vuses, i);
- gcc_assert (TREE_CODE (t) != SSA_NAME);
- var_ann (t)->in_vuse_list = 1;
- VARRAY_PUSH_TREE (build_vuses, t);
- }
- if (s_ann)
- s_ann->makes_aliased_loads = ro_call_aliased_loads;
- return;
- }
+ get_stmt_ann (new_stmt);
- memset (&empty_ann, 0, sizeof (struct stmt_ann_d));
+ /* Process NEW_STMT looking for operands. */
+ start_ssa_stmt_operands ();
+ parse_ssa_operands (new_stmt);
- /* Add a VUSE for each call-clobbered variable. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, &empty_ann, opf_none);
- }
+ for (i = 0; VEC_iterate (tree, build_vuses, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vuse_list = false;
+
+ for (i = 0; VEC_iterate (tree, build_vdefs, i, op); i++)
+ if (TREE_CODE (op) != SSA_NAME)
+ var_ann (op)->in_vdef_list = false;
- ro_call_aliased_loads = empty_ann.makes_aliased_loads;
- if (s_ann)
- s_ann->makes_aliased_loads = empty_ann.makes_aliased_loads;
+ /* Remove any virtual operands that were found. */
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
- /* Prepare empty cache vectors. */
- if (ro_call_vuses)
- VARRAY_POP_ALL (ro_call_vuses);
- else
- VARRAY_TREE_INIT (ro_call_vuses, 10, "ro_call_vuses");
+ /* For each VDEF on the original statement, we want to create a
+ VUSE of the VDEF result operand on the new statement. */
+ FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
+ append_vuse (op);
- /* Now fill the clobbered cache with the values that have been found. */
- for (i = 0; i < VARRAY_ACTIVE_SIZE (build_vuses); i++)
- VARRAY_PUSH_TREE (ro_call_vuses, VARRAY_TREE (build_vuses, i));
+ finalize_ssa_stmt_operands (new_stmt);
- ssa_ro_call_cache_valid = true;
+ /* All uses in this fake stmt must not be in the immediate use lists. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
+ delink_imm_use (use_p);
}
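+/* A minimal usage sketch (assuming the MODIFY_EXPR statement form;
+ the names LHS, RHS and NEW_STMT are illustrative): given a store
+ LHS = RHS, DOM builds the mirror load RHS = LHS and hashes it, so
+ that later loads of LHS can be replaced with RHS:
+
+ new_stmt = build2 (MODIFY_EXPR, TREE_TYPE (rhs), rhs, lhs);
+ create_ssa_artificial_load_stmt (new_stmt, stmt); */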
-/* Copies virtual operands from SRC to DST. */
+
+/* Swap operands EXP0 and EXP1 in statement STMT. No attempt is made
+ to test the validity of the swap operation. */
void
-copy_virtual_operands (tree dst, tree src)
+swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
{
- unsigned i;
- vuse_optype vuses = STMT_VUSE_OPS (src);
- v_may_def_optype v_may_defs = STMT_V_MAY_DEF_OPS (src);
- v_must_def_optype v_must_defs = STMT_V_MUST_DEF_OPS (src);
- vuse_optype *vuses_new = &stmt_ann (dst)->operands.vuse_ops;
- v_may_def_optype *v_may_defs_new = &stmt_ann (dst)->operands.v_may_def_ops;
- v_must_def_optype *v_must_defs_new = &stmt_ann (dst)->operands.v_must_def_ops;
-
- if (vuses)
- {
- *vuses_new = allocate_vuse_optype (NUM_VUSES (vuses));
- for (i = 0; i < NUM_VUSES (vuses); i++)
- initialize_vuse_operand (*vuses_new, i, VUSE_OP (vuses, i), dst, NULL);
- }
+ tree op0, op1;
+ op0 = *exp0;
+ op1 = *exp1;
- if (v_may_defs)
+ /* If the operand cache is active, attempt to preserve the relative
+ positions of these two operands in their respective immediate use
+ lists. */
+ if (ssa_operands_active () && op0 != op1)
{
- *v_may_defs_new = allocate_v_may_def_optype (NUM_V_MAY_DEFS (v_may_defs));
- for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
- {
- initialize_v_may_def_operand (*v_may_defs_new, i,
- V_MAY_DEF_RESULT (v_may_defs, i),
- V_MAY_DEF_OP (v_may_defs, i), dst,
- NULL);
- }
- }
+ use_optype_p use0, use1, ptr;
+ use0 = use1 = NULL;
- if (v_must_defs)
- {
- *v_must_defs_new
- = allocate_v_must_def_optype (NUM_V_MUST_DEFS (v_must_defs));
- for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
- {
- initialize_v_must_def_operand (*v_must_defs_new, i,
- V_MUST_DEF_RESULT (v_must_defs, i),
- V_MUST_DEF_KILL (v_must_defs, i), dst,
- NULL);
+ /* Find the 2 operands in the cache, if they are there. */
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp0)
+ {
+ use0 = ptr;
+ break;
+ }
+
+ for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+ if (USE_OP_PTR (ptr)->use == exp1)
+ {
+ use1 = ptr;
+ break;
+ }
+
+ /* Unless both uses have operand entries, there is nothing to update
+ in the cache; the raw operands are still swapped below. */
+ if (use0 && use1)
+ {
+ tree *tmp = USE_OP_PTR (use1)->use;
+ USE_OP_PTR (use1)->use = USE_OP_PTR (use0)->use;
+ USE_OP_PTR (use0)->use = tmp;
}
}
+
+ /* Now swap the data. */
+ *exp0 = op1;
+ *exp1 = op0;
}
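+/* Usage sketch (assuming a MODIFY_EXPR statement form): to
+ canonicalize a commutative RHS such as "x_1 = b_3 + a_2", a pass can
+ swap the two operand slots while keeping the operand cache and the
+ immediate-use lists consistent:
+
+ tree rhs = TREE_OPERAND (stmt, 1);
+ swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
+ &TREE_OPERAND (rhs, 1)); */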
-/* Specifically for use in DOM's expression analysis. Given a store, we
- create an artificial stmt which looks like a load from the store, this can
- be used to eliminate redundant loads. OLD_OPS are the operands from the
- store stmt, and NEW_STMT is the new load which represents a load of the
- values stored. */
+/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
+ *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
+ a single variable whose address has been taken or any other valid
+ GIMPLE memory reference (structure reference, array, etc.). If the
+ base address of REF is a decl that has sub-variables, also add all
+ of its sub-variables. */
void
-create_ssa_artficial_load_stmt (stmt_operands_p old_ops, tree new_stmt)
+add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
- stmt_ann_t ann;
- tree op;
- stmt_operands_t tmp;
- unsigned j;
-
- memset (&tmp, 0, sizeof (stmt_operands_t));
- ann = get_stmt_ann (new_stmt);
+ tree var;
+ subvar_t svars;
- /* Free operands just in case is was an existing stmt. */
- free_ssa_operands (&(ann->operands));
+ gcc_assert (addresses_taken);
- build_ssa_operands (new_stmt, NULL, &tmp, &(ann->operands));
- free_vuses (&(ann->operands.vuse_ops));
- free_v_may_defs (&(ann->operands.v_may_def_ops));
- free_v_must_defs (&(ann->operands.v_must_def_ops));
-
- /* For each VDEF on the original statement, we want to create a
- VUSE of the V_MAY_DEF result or V_MUST_DEF op on the new
- statement. */
- for (j = 0; j < NUM_V_MAY_DEFS (old_ops->v_may_def_ops); j++)
- {
- op = V_MAY_DEF_RESULT (old_ops->v_may_def_ops, j);
- append_vuse (op);
- }
-
- for (j = 0; j < NUM_V_MUST_DEFS (old_ops->v_must_def_ops); j++)
+ /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
+ as the only thing we take the address of. If VAR is a structure,
+ taking the address of a field means that the whole structure may
+ be referenced using pointer arithmetic. See PR 21407 and the
+ ensuing mailing list discussion. */
+ var = get_base_address (ref);
+ if (var && SSA_VAR_P (var))
{
- op = V_MUST_DEF_RESULT (old_ops->v_must_def_ops, j);
- append_vuse (op);
+ if (*addresses_taken == NULL)
+ *addresses_taken = BITMAP_GGC_ALLOC ();
+
+ if (var_can_have_subvars (var)
+ && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ {
+ bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
+ TREE_ADDRESSABLE (sv->var) = 1;
+ }
+ }
+ else
+ {
+ bitmap_set_bit (*addresses_taken, DECL_UID (var));
+ TREE_ADDRESSABLE (var) = 1;
+ }
}
-
- /* Now set the vuses for this new stmt. */
- ann->operands.vuse_ops = finalize_ssa_vuses (&(tmp.vuse_ops), NULL);
}
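+/* Usage sketch (S_ANN stands for the current statement's annotation):
+ when operand scanning reaches an ADDR_EXPR, the base of its operand
+ is recorded as addressable:
+
+ case ADDR_EXPR:
+ add_to_addressable_set (TREE_OPERAND (expr, 0),
+ &s_ann->addresses_taken); */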
+
/* Scan the immediate_use list for VAR making sure it is linked properly.
- return RTUE iof there is a problem. */
+ Return TRUE if there is a problem and emit an error message to F. */
bool
verify_imm_links (FILE *f, tree var)
{
- ssa_imm_use_t *ptr, *prev;
- ssa_imm_use_t *list;
+ use_operand_p ptr, prev, list;
int count;
gcc_assert (TREE_CODE (var) == SSA_NAME);
prev = ptr;
ptr = ptr->next;
- /* Avoid infinite loops. */
- if (count++ > 30000)
+
+ /* Avoid infinite loops. 50,000,000 uses probably indicates a
+ problem. */
+ if (count++ > 50000000)
goto error;
}
FOR_EACH_IMM_USE_FAST (use_p, iter, var)
{
- print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
+ if (use_p->stmt == NULL && use_p->use == NULL)
+ fprintf (file, "***end of stmt iterator marker***\n");
+ else if (!is_gimple_reg (USE_FROM_PTR (use_p)))
+ print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
+ else
+ print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
}
fprintf(file, "\n");
}
+
/* Dump all the immediate uses to FILE. */
void
dump_immediate_uses (stderr);
}
+
/* Dump def-use edges on stderr. */
void
dump_immediate_uses_for (stderr, var);
}
-#include "gt-tree-ssa-operands.h"
+
+/* Create a new change buffer for the statement pointed to by STMT_P
+ and push the buffer onto SCB_STACK. Each change buffer records the
+ state information needed to determine what changed in the statement.
+ Mainly, this keeps track of symbols that may need to be put into SSA
+ form, SSA name replacements, and other information needed to keep
+ the SSA form up to date. */
+
+void
+push_stmt_changes (tree *stmt_p)
+{
+ tree stmt;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = xmalloc (sizeof *buf);
+ memset (buf, 0, sizeof *buf);
+
+ buf->stmt_p = stmt_p;
+
+ if (stmt_references_memory_p (stmt))
+ {
+ tree op;
+ ssa_op_iter i;
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (buf->loads == NULL)
+ buf->loads = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (buf->loads, DECL_UID (sym));
+ }
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (buf->stores == NULL)
+ buf->stores = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (buf->stores, DECL_UID (sym));
+ }
+ }
+
+ VEC_safe_push (scb_t, heap, scb_stack, buf);
+}
+
+
+/* Given two sets S1 and S2, mark the symbols that differ in S1 and S2
+ for renaming. The set to mark for renaming is (S1 & ~S2) | (S2 & ~S1). */
+
+static void
+mark_difference_for_renaming (bitmap s1, bitmap s2)
+{
+ if (s1 == NULL && s2 == NULL)
+ return;
+
+ if (s1 && s2 == NULL)
+ mark_set_for_renaming (s1);
+ else if (s1 == NULL && s2)
+ mark_set_for_renaming (s2);
+ else if (!bitmap_equal_p (s1, s2))
+ {
+ bitmap t1 = BITMAP_ALLOC (NULL);
+ bitmap t2 = BITMAP_ALLOC (NULL);
+
+ bitmap_and_compl (t1, s1, s2);
+ bitmap_and_compl (t2, s2, s1);
+ bitmap_ior_into (t1, t2);
+ mark_set_for_renaming (t1);
+
+ BITMAP_FREE (t1);
+ BITMAP_FREE (t2);
+ }
+}
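+/* For example, if the old set of stored symbols was { a, b } and the
+ new set is { b, c }, only A and C are marked for renaming; the SSA
+ web for B is unaffected by the change. */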
+
+
+/* Pop the top SCB from SCB_STACK and act on the differences between
+ what was recorded by push_stmt_changes and the current state of
+ the statement. */
+
+void
+pop_stmt_changes (tree *stmt_p)
+{
+ tree op, stmt;
+ ssa_op_iter iter;
+ bitmap loads, stores;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = VEC_pop (scb_t, scb_stack);
+ gcc_assert (stmt_p == buf->stmt_p);
+
+ /* Force an operand re-scan on the statement and mark any newly
+ exposed variables. */
+ update_stmt (stmt);
+
+ /* Determine whether any memory symbols need to be renamed. If the
+ sets of loads and stores are different after the statement is
+ modified, then the affected symbols need to be renamed.
+
+ Note that it may be possible for the statement to not reference
+ memory anymore, but we still need to act on the differences in
+ the sets of symbols. */
+ loads = stores = NULL;
+ if (stmt_references_memory_p (stmt))
+ {
+ tree op;
+ ssa_op_iter i;
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VUSE)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (loads == NULL)
+ loads = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (loads, DECL_UID (sym));
+ }
+
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_VDEF)
+ {
+ tree sym = TREE_CODE (op) == SSA_NAME ? SSA_NAME_VAR (op) : op;
+ if (stores == NULL)
+ stores = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (stores, DECL_UID (sym));
+ }
+ }
+
+ /* If LOADS is different from BUF->LOADS, the affected
+ symbols need to be marked for renaming. */
+ mark_difference_for_renaming (loads, buf->loads);
+
+ /* Similarly for STORES and BUF->STORES. */
+ mark_difference_for_renaming (stores, buf->stores);
+
+ /* Mark all the naked GIMPLE register operands for renaming. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
+ if (DECL_P (op))
+ mark_sym_for_renaming (op);
+
+ /* FIXME, need to add more finalizers here. Cleanup EH info,
+ recompute invariants for address expressions, add
+ SSA replacement mappings, etc. For instance, given
+ testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
+ the form:
+
+ # SMT.4_20 = VDEF <SMT.4_16>
+ D.1576_11 = 1.0e+0;
+
+ So, the VDEF will disappear, but instead of marking SMT.4 for
+ renaming it would be far more efficient to establish a
+ replacement mapping that would replace every reference of
+ SMT.4_20 with SMT.4_16. */
+
+ /* Free memory used by the buffer. */
+ BITMAP_FREE (buf->loads);
+ BITMAP_FREE (buf->stores);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
+ buf->stmt_p = NULL;
+ free (buf);
+}
+
+
+/* Discard the topmost change buffer from SCB_STACK. This is useful
+ when the caller realizes that it did not actually modify the
+ statement, as it avoids the expensive operand re-scan. */
+
+void
+discard_stmt_changes (tree *stmt_p)
+{
+ scb_t buf;
+ tree stmt;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ stmt = *stmt_p;
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = VEC_pop (scb_t, scb_stack);
+ gcc_assert (stmt_p == buf->stmt_p);
+
+ /* Free memory used by the buffer. */
+ BITMAP_FREE (buf->loads);
+ BITMAP_FREE (buf->stores);
+ buf->stmt_p = NULL;
+ free (buf);
+}
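+/* Typical bracketing idiom (a sketch; fold_stmt_inplace stands in for
+ whatever transformation the caller attempts):
+
+ push_stmt_changes (&stmt);
+ if (fold_stmt_inplace (stmt))
+ pop_stmt_changes (&stmt);
+ else
+ discard_stmt_changes (&stmt);
+
+ pop_stmt_changes re-scans the operands and marks modified symbols for
+ renaming; discard_stmt_changes skips the re-scan when nothing
+ changed. */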
+
+
+/* Returns true if statement STMT may access memory. */
+
+bool
+stmt_references_memory_p (tree stmt)
+{
+ if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
+ return false;
+
+ return stmt_ann (stmt)->references_memory;
+}