/* Loop invariant motion.
- Copyright (C) 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
+#include "real.h"
+#include "hashtab.h"
+
+/* TODO: Support for predicated code motion. I.e.
+
+ while (1)
+ {
+ if (cond)
+ {
+ a = inv;
+ something;
+ }
+ }
+
+ where COND and INV are invariants, but evaluating INV may trap or be
+ invalid for some other reason if !COND. This may be transformed to
+
+ if (cond)
+ a = inv;
+ while (1)
+ {
+ if (cond)
+ something;
+ } */
/* A type for the list of statements that have to be moved in order to be able
to hoist an invariant computation. */
? NULL \
: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
-/* Description of a memory reference for store motion. */
+/* Description of a memory reference location for store motion. */
-struct mem_ref
+struct mem_ref_loc
{
tree *ref; /* The reference itself. */
tree stmt; /* The statement in which it occurs. */
- struct mem_ref *next; /* Next use in the chain. */
+ struct mem_ref_loc *next; /* Next use in the chain. */
+};
+
+/* Description of a memory reference for store motion. */
+
+struct mem_ref
+{
+ tree mem; /* The memory itself. */
+ hashval_t hash; /* Its hash value. */
+ bool is_stored; /* True if there is a store to the location
+ in the loop. */
+ struct mem_ref_loc *locs; /* The locations where it is found. */
+ bitmap vops; /* Vops corresponding to this memory
+ location. */
+ struct mem_ref *next; /* Next memory reference in the list.
+ Memory references are stored in a hash
+ table, but the hash function depends
+ on values of pointers. Thus we cannot use
+ htab_traverse, since then we would get
+ miscompares during bootstrap (although the
+ produced code would be correct). */
};
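+
+/* For instance, consumers walk the list as in
+
+ for (ref = mem_refs; ref; ref = ref->next)
+ ...
+
+ rather than using htab_traverse, so that the visit order is
+ deterministic and independent of pointer values. */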
/* Minimum cost of an expensive expression. */
block will be executed. */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
-static unsigned max_stmt_uid; /* Maximal uid of a statement. Uids to phi
- nodes are assigned using the versions of
- ssa names they define. */
-
-/* Returns uid of statement STMT. */
-
-static unsigned
-get_stmt_uid (tree stmt)
-{
- if (TREE_CODE (stmt) == PHI_NODE)
- return SSA_NAME_VERSION (PHI_RESULT (stmt)) + max_stmt_uid;
-
- return stmt_ann (stmt)->uid;
-}
-
/* Calls CBCK for each index in memory reference ADDR_P. There are two
kinds of situations handled; in each of these cases, the memory reference
and DATA are passed to the callback:
case BIT_FIELD_REF:
case VIEW_CONVERT_EXPR:
- case ARRAY_RANGE_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
nxt = &TREE_OPERAND (*addr_p, 0);
break;
case ARRAY_REF:
+ case ARRAY_RANGE_REF:
nxt = &TREE_OPERAND (*addr_p, 0);
if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
return false;
case PARM_DECL:
case STRING_CST:
case RESULT_DECL:
+ case VECTOR_CST:
+ case COMPLEX_CST:
+ case INTEGER_CST:
+ case REAL_CST:
+ return true;
+
+ case TARGET_MEM_REF:
+ idx = &TMR_BASE (*addr_p);
+ if (*idx
+ && !cbck (*addr_p, idx, data))
+ return false;
+ idx = &TMR_INDEX (*addr_p);
+ if (*idx
+ && !cbck (*addr_p, idx, data))
+ return false;
return true;
default:
{
/* If we perform unswitching, force the operands of the invariant
condition to be moved out of the loop. */
- get_stmt_operands (stmt);
-
return MOVE_POSSIBLE;
}
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return MOVE_IMPOSSIBLE;
if (stmt_ends_bb_p (stmt))
return MOVE_IMPOSSIBLE;
- get_stmt_operands (stmt);
-
if (stmt_ann (stmt)->has_volatile_ops)
return MOVE_IMPOSSIBLE;
- lhs = TREE_OPERAND (stmt, 0);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
if (TREE_CODE (lhs) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
return MOVE_IMPOSSIBLE;
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_SIDE_EFFECTS (rhs))
return MOVE_IMPOSSIBLE;
|| tree_could_trap_p (rhs))
return MOVE_PRESERVE_EXECUTION;
+ if (get_call_expr_in (stmt))
+ {
+ /* While a pure or const call is guaranteed to have no side effects, we
+ cannot move it arbitrarily. Consider code like
+
+ char *s = something ();
+
+ while (1)
+ {
+ if (s)
+ t = strlen (s);
+ else
+ t = 0;
+ }
+
+ Here the strlen call cannot be moved out of the loop, even though
+ s is invariant. In addition to possibly creating a call with
+ invalid arguments, hoisting a call that might never be executed
+ may cause a performance regression if the call is costly. */
+ return MOVE_PRESERVE_EXECUTION;
+ }
return MOVE_POSSIBLE;
}
if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
max_loop = find_common_loop (max_loop,
- LIM_DATA (def_stmt)->max_loop->outer);
+ loop_outer (LIM_DATA (def_stmt)->max_loop));
if (max_loop == loop)
return NULL;
- max_loop = superloop_at_depth (loop, max_loop->depth + 1);
+ max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
return max_loop;
}
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return NULL;
- nops = first_rtl_op (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
{
aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
&& def_bb->loop_father == loop)
data->cost += LIM_DATA (def_stmt)->cost;
- dep = xmalloc (sizeof (struct depend));
+ dep = XNEW (struct depend);
dep->stmt = def_stmt;
dep->next = data->depends;
data->depends = dep;
static unsigned
stmt_cost (tree stmt)
{
- tree lhs, rhs;
+ tree rhs;
unsigned cost = 1;
/* Always try to create possibilities for unswitching. */
if (TREE_CODE (stmt) == COND_EXPR)
return LIM_EXPENSIVE;
- lhs = TREE_OPERAND (stmt, 0);
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
/* Hoisting memory references out should almost surely be a win. */
- if (!is_gimple_variable (lhs))
- cost += 20;
- if (is_gimple_addressable (rhs) && !is_gimple_variable (rhs))
+ if (stmt_references_memory_p (stmt))
cost += 20;
switch (TREE_CODE (rhs))
/* Unless the call is to builtin_constant_p; this always folds to a
constant, so moving it is useless. */
rhs = get_callee_fndecl (rhs);
- if (DECL_BUILT_IN (rhs)
+ if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
return 0;
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
+ case RDIV_EXPR:
/* Division and multiplication are usually expensive. */
cost += 20;
break;
+ case LSHIFT_EXPR:
+ case RSHIFT_EXPR:
+ cost += 20;
+ break;
+
default:
break;
}
stmt_loop = find_common_loop (orig_loop, stmt_loop);
if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
stmt_loop = find_common_loop (stmt_loop,
- LIM_DATA (stmt)->tgt_loop->outer);
+ loop_outer (LIM_DATA (stmt)->tgt_loop));
if (flow_loop_nested_p (stmt_loop, level))
return;
free (data);
}
+/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
+
+static tree
+rewrite_reciprocal (block_stmt_iterator *bsi)
+{
+ tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;
+
+ stmt = bsi_stmt (*bsi);
+ lhs = GENERIC_TREE_OPERAND (stmt, 0);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
+
+ /* stmt must be GIMPLE_MODIFY_STMT. */
+ var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
+ add_referenced_var (var);
+
+ tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
+ build_real (TREE_TYPE (rhs), dconst1),
+ TREE_OPERAND (rhs, 1));
+ stmt1 = build_gimple_modify_stmt (var, tmp);
+ name = make_ssa_name (var, stmt1);
+ GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+ tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
+ name, TREE_OPERAND (rhs, 0));
+ stmt2 = build_gimple_modify_stmt (lhs, tmp);
+
+ /* Replace division stmt with reciprocal and multiply stmts.
+ The multiply stmt is not invariant, so update iterator
+ and avoid rescanning. */
+ bsi_replace (bsi, stmt1, true);
+ bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
+ SSA_NAME_DEF_STMT (lhs) = stmt2;
+
+ /* Continue processing with invariant reciprocal statement. */
+ return stmt1;
+}
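+
+/* For instance, with b loop-invariant, the statement
+
+ a = x / b;
+
+ in the loop body becomes
+
+ reciptmp = 1 / b; <-- invariant, hoistable
+ a = x * reciptmp;
+
+ This is only valid under -funsafe-math-optimizations, which the
+ caller checks. */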
+
+/* Check if the pattern at *BSI is a bittest of the form
+ (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
+
+static tree
+rewrite_bittest (block_stmt_iterator *bsi)
+{
+ tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
+ use_operand_p use;
+
+ stmt = bsi_stmt (*bsi);
+ lhs = GENERIC_TREE_OPERAND (stmt, 0);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
+
+ /* Verify that the single use of lhs is a comparison against zero. */
+ if (TREE_CODE (lhs) != SSA_NAME
+ || !single_imm_use (lhs, &use, &use_stmt)
+ || TREE_CODE (use_stmt) != COND_EXPR)
+ return stmt;
+ t = COND_EXPR_COND (use_stmt);
+ if (TREE_OPERAND (t, 0) != lhs
+ || (TREE_CODE (t) != NE_EXPR
+ && TREE_CODE (t) != EQ_EXPR)
+ || !integer_zerop (TREE_OPERAND (t, 1)))
+ return stmt;
+
+ /* Get at the operands of the shift. The rhs is TMP1 & 1. */
+ stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
+ if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ return stmt;
+
+ /* There may be a conversion in between, possibly inserted by fold. */
+ t = GIMPLE_STMT_OPERAND (stmt1, 1);
+ if (TREE_CODE (t) == NOP_EXPR
+ || TREE_CODE (t) == CONVERT_EXPR)
+ {
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) != SSA_NAME
+ || !has_single_use (t))
+ return stmt;
+ stmt1 = SSA_NAME_DEF_STMT (t);
+ if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ return stmt;
+ t = GIMPLE_STMT_OPERAND (stmt1, 1);
+ }
+
+ /* Verify that B is loop invariant but A is not. Verify that with
+ all the stmt walking we are still in the same loop. */
+ if (TREE_CODE (t) == RSHIFT_EXPR
+ && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
+ && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
+ loop_containing_stmt (stmt1)) != NULL
+ && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
+ loop_containing_stmt (stmt1)) == NULL)
+ {
+ tree a = TREE_OPERAND (t, 0);
+ tree b = TREE_OPERAND (t, 1);
+
+ /* 1 << B */
+ var = create_tmp_var (TREE_TYPE (a), "shifttmp");
+ add_referenced_var (var);
+ t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
+ build_int_cst (TREE_TYPE (a), 1), b);
+ stmt1 = build_gimple_modify_stmt (var, t);
+ name = make_ssa_name (var, stmt1);
+ GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+
+ /* A & (1 << B) */
+ t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
+ stmt2 = build_gimple_modify_stmt (var, t);
+ name = make_ssa_name (var, stmt2);
+ GIMPLE_STMT_OPERAND (stmt2, 0) = name;
+ SET_USE (use, name);
+
+ bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
+ bsi_replace (bsi, stmt2, true);
+
+ return stmt1;
+ }
+
+ return stmt;
+}
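+
+/* For instance, with B loop-invariant and A not, the sequence
+
+ T1 = A >> B;
+ T2 = T1 & 1;
+ if (T2 != 0) ...
+
+ becomes
+
+ T3 = 1 << B; <-- invariant, hoistable
+ T4 = A & T3;
+ if (T4 != 0) ...
+
+ trading the variant shift inside the loop for an invariant one
+ outside it. */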
+
+
/* Determine the outermost loops in that statements in basic block BB are
invariant, and record them to the LIM_DATA associated with the statements.
Callback for walk_dominator_tree. */
{
enum move_pos pos;
block_stmt_iterator bsi;
- tree stmt;
+ tree stmt, rhs;
bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
- if (!bb->loop_father->outer)
+ if (!loop_outer (bb->loop_father))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
- bb->index, bb->loop_father->num, bb->loop_father->depth);
+ bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
continue;
}
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ {
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+
+ /* If the divisor is invariant, convert a/b to a*(1/b), allowing the
+ reciprocal to be hoisted out of the loop and saving an expensive divide. */
+ if (pos == MOVE_POSSIBLE
+ && TREE_CODE (rhs) == RDIV_EXPR
+ && flag_unsafe_math_optimizations
+ && !flag_trapping_math
+ && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
+ loop_containing_stmt (stmt)) != NULL
+ && outermost_invariant_loop_expr (rhs,
+ loop_containing_stmt (stmt)) == NULL)
+ stmt = rewrite_reciprocal (&bsi);
+
+ /* If the shift count is invariant, convert (A >> B) & 1 to
+ A & (1 << B), allowing the bit mask to be hoisted out of the loop
+ and saving an expensive shift. */
+ if (pos == MOVE_POSSIBLE
+ && TREE_CODE (rhs) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (rhs, 1))
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
+ && has_single_use (TREE_OPERAND (rhs, 0)))
+ stmt = rewrite_bittest (&bsi);
+ }
+
stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
LIM_DATA (stmt)->always_executed_in = outermost;
{
print_generic_stmt_indented (dump_file, stmt, 0, 2);
fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
- LIM_DATA (stmt)->max_loop->depth,
+ loop_depth (LIM_DATA (stmt)->max_loop),
LIM_DATA (stmt)->cost);
}
struct dom_walk_data walk_data;
memset (&walk_data, 0, sizeof (struct dom_walk_data));
+ walk_data.dom_direction = CDI_DOMINATORS;
walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
init_walk_dominator_tree (&walk_data);
fini_walk_dominator_tree (&walk_data);
}
-/* Commits edge insertions and updates loop structures. */
-
-void
-loop_commit_inserts (void)
-{
- unsigned old_last_basic_block, i;
- basic_block bb;
-
- old_last_basic_block = last_basic_block;
- bsi_commit_edge_inserts (NULL);
- for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
- {
- bb = BASIC_BLOCK (i);
- add_bb_to_loop (bb,
- find_common_loop (EDGE_SUCC (bb, 0)->dest->loop_father,
- EDGE_PRED (bb, 0)->src->loop_father));
- }
-}
-
/* Hoist the statements in basic block BB out of the loops prescribed by
data stored in LIM_DATA structures associated with each statement. Callback
for walk_dominator_tree. */
tree stmt;
unsigned cost = 0;
- if (!bb->loop_father->outer)
+ if (!loop_outer (bb->loop_father))
return;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
cost, level->num);
}
bsi_insert_on_edge (loop_preheader_edge (level), stmt);
- bsi_remove (&bsi);
+ bsi_remove (&bsi, false);
}
}
struct dom_walk_data walk_data;
memset (&walk_data, 0, sizeof (struct dom_walk_data));
+ walk_data.dom_direction = CDI_DOMINATORS;
walk_data.before_dom_children_before_stmts = move_computations_stmt;
init_walk_dominator_tree (&walk_data);
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
fini_walk_dominator_tree (&walk_data);
- loop_commit_inserts ();
- rewrite_into_ssa (false);
- if (bitmap_first_set_bit (vars_to_rename) >= 0)
- {
- /* The rewrite of ssa names may cause violation of loop closed ssa
- form invariants. TODO -- avoid these rewrites completely.
- Information in virtual phi nodes is sufficient for it. */
- rewrite_into_loop_closed_ssa ();
- }
- bitmap_clear (vars_to_rename);
+ bsi_commit_edge_inserts ();
+ if (need_ssa_update_p ())
+ rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return;
- nops = first_rtl_op (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
return true;
}
-/* Records memory reference *REF (that occurs in statement STMT)
- to the list MEM_REFS. */
+/* Records memory reference location *REF to the list MEM_REFS. The reference
+ occurs in statement STMT. */
static void
-record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
+record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
{
- struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));
+ struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);
aref->stmt = stmt;
aref->ref = ref;
*mem_refs = aref;
}
-/* Releases list of memory references MEM_REFS. */
+/* Releases list of memory reference locations MEM_REFS. */
static void
-free_mem_refs (struct mem_ref *mem_refs)
+free_mem_ref_locs (struct mem_ref_loc *mem_refs)
{
- struct mem_ref *act;
+ struct mem_ref_loc *act;
while (mem_refs)
{
}
}
-/* If VAR is defined in LOOP and the statement it is defined in does not belong
- to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
- to the set SEEN. */
+/* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
static void
-maybe_queue_var (tree var, struct loop *loop,
- sbitmap seen, tree *queue, unsigned *in_queue)
+rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
{
- tree stmt = SSA_NAME_DEF_STMT (var);
- basic_block def_bb = bb_for_stmt (stmt);
-
- if (!def_bb
- || !flow_bb_inside_loop_p (loop, def_bb)
- || TEST_BIT (seen, get_stmt_uid (stmt)))
- return;
-
- SET_BIT (seen, get_stmt_uid (stmt));
- queue[(*in_queue)++] = stmt;
-}
-
-/* If COMMON_REF is NULL, set COMMON_REF to *OP and return true.
- Otherwise return true if the memory reference *OP is equal to COMMON_REF.
- Record the reference OP to list MEM_REFS. STMT is the statement in that
- the reference occurs. */
-
-struct sra_data
-{
- struct mem_ref **mem_refs;
- tree common_ref;
- tree stmt;
-};
-
-static bool
-fem_single_reachable_address (tree *op, void *data)
-{
- struct sra_data *sra_data = data;
-
- if (sra_data->common_ref
- && !operand_equal_p (*op, sra_data->common_ref, 0))
- return false;
- sra_data->common_ref = *op;
-
- record_mem_ref (sra_data->mem_refs, sra_data->stmt, op);
- return true;
-}
-
-/* Runs CALLBACK for each operand of STMT that is a memory reference. DATA
- is passed to the CALLBACK as well. The traversal stops if CALLBACK
- returns false, for_each_memref then returns false as well. Otherwise
- for_each_memref returns true. */
-
-static bool
-for_each_memref (tree stmt, bool (*callback)(tree *, void *), void *data)
-{
- tree *op;
-
- if (TREE_CODE (stmt) == RETURN_EXPR)
- stmt = TREE_OPERAND (stmt, 1);
+ tree var;
+ ssa_op_iter iter;
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ for (; mem_refs; mem_refs = mem_refs->next)
{
- op = &TREE_OPERAND (stmt, 0);
- if (TREE_CODE (*op) != SSA_NAME
- && !callback (op, data))
- return false;
-
- op = &TREE_OPERAND (stmt, 1);
- if (TREE_CODE (*op) != SSA_NAME
- && is_gimple_lvalue (*op)
- && !callback (op, data))
- return false;
+ FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
+ mark_sym_for_renaming (SSA_NAME_VAR (var));
- stmt = TREE_OPERAND (stmt, 1);
+ *mem_refs->ref = tmp_var;
+ update_stmt (mem_refs->stmt);
}
+}
- if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
- stmt = TREE_OPERAND (stmt, 0);
-
- if (TREE_CODE (stmt) == CALL_EXPR)
- {
- tree args;
+/* The name and the length of the currently generated variable
+ for lsm. */
+#define MAX_LSM_NAME_LENGTH 40
+static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
+static int lsm_tmp_name_length;
- for (args = TREE_OPERAND (stmt, 1); args; args = TREE_CHAIN (args))
- {
- op = &TREE_VALUE (args);
+/* Adds S to lsm_tmp_name. */
- if (TREE_CODE (*op) != SSA_NAME
- && is_gimple_lvalue (*op)
- && !callback (op, data))
- return false;
- }
- }
+static void
+lsm_tmp_name_add (const char *s)
+{
+ int l = strlen (s) + lsm_tmp_name_length;
+ if (l > MAX_LSM_NAME_LENGTH)
+ return;
- return true;
+ strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
+ lsm_tmp_name_length = l;
}
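+
+/* Note that if S does not fit, it is dropped entirely rather than
+ truncated; this is harmless, since the result is only used as the
+ prefix of a temporary variable name. */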
-/* Determine whether all memory references inside the LOOP that correspond
- to virtual ssa names defined in statement STMT are equal.
- If so, store the list of the references to MEM_REFS, and return one
- of them. Otherwise store NULL to MEM_REFS and return NULL_TREE.
- *SEEN_CALL_STMT is set to true if the virtual operands suggest
- that the reference might be clobbered by a call inside the LOOP. */
+/* Stores the name for the temporary variable that replaces REF into
+ lsm_tmp_name. */
-static tree
-single_reachable_address (struct loop *loop, tree stmt,
- struct mem_ref **mem_refs,
- bool *seen_call_stmt)
+static void
+gen_lsm_tmp_name (tree ref)
{
- unsigned max_uid = max_stmt_uid + num_ssa_names;
- tree *queue = xmalloc (sizeof (tree) * max_uid);
- sbitmap seen = sbitmap_alloc (max_uid);
- unsigned in_queue = 1;
- dataflow_t df;
- unsigned i, n;
- struct sra_data sra_data;
- tree call;
- tree val;
- ssa_op_iter iter;
-
- sbitmap_zero (seen);
-
- *mem_refs = NULL;
- sra_data.mem_refs = mem_refs;
- sra_data.common_ref = NULL_TREE;
+ const char *name;
- queue[0] = stmt;
- SET_BIT (seen, get_stmt_uid (stmt));
- *seen_call_stmt = false;
-
- while (in_queue)
+ switch (TREE_CODE (ref))
{
- stmt = queue[--in_queue];
- sra_data.stmt = stmt;
-
- if (LIM_DATA (stmt)
- && LIM_DATA (stmt)->sm_done)
- goto fail;
-
- switch (TREE_CODE (stmt))
- {
- case MODIFY_EXPR:
- case CALL_EXPR:
- case RETURN_EXPR:
- if (!for_each_memref (stmt, fem_single_reachable_address,
- &sra_data))
- goto fail;
-
- /* If this is a function that may depend on the memory location,
- record the fact. We cannot directly refuse call clobbered
- operands here, since sra_data.common_ref did not have
- to be set yet. */
- call = get_call_expr_in (stmt);
- if (call
- && !(call_expr_flags (call) & ECF_CONST))
- *seen_call_stmt = true;
-
- /* Traverse also definitions of the VUSES (there may be other
- distinct from the one we used to get to this statement). */
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
- maybe_queue_var (val, loop, seen, queue, &in_queue);
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case INDIRECT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ break;
- break;
+ case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
+ case ARRAY_RANGE_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ break;
- case PHI_NODE:
- for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
- maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
- seen, queue, &in_queue);
- break;
+ case REALPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_RE");
+ break;
+
+ case IMAGPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_IM");
+ break;
- default:
- goto fail;
- }
+ case COMPONENT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ name = get_name (TREE_OPERAND (ref, 1));
+ if (!name)
+ name = "F";
+ lsm_tmp_name_add (name);
+ break;
+
+ case ARRAY_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_I");
+ break;
- /* Find uses of virtual names. */
- df = get_immediate_uses (stmt);
- n = num_immediate_uses (df);
+ case SSA_NAME:
+ ref = SSA_NAME_VAR (ref);
+ /* Fallthru. */
- for (i = 0; i < n; i++)
- {
- stmt = immediate_use (df, i);
+ case VAR_DECL:
+ case PARM_DECL:
+ name = get_name (ref);
+ if (!name)
+ name = "D";
+ lsm_tmp_name_add (name);
+ break;
- if (!flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
- continue;
+ case STRING_CST:
+ lsm_tmp_name_add ("S");
+ break;
- if (TEST_BIT (seen, get_stmt_uid (stmt)))
- continue;
- SET_BIT (seen, get_stmt_uid (stmt));
+ case RESULT_DECL:
+ lsm_tmp_name_add ("R");
+ break;
- queue[in_queue++] = stmt;
- }
+ default:
+ gcc_unreachable ();
}
-
- free (queue);
- sbitmap_free (seen);
-
- return sra_data.common_ref;
-
-fail:
- free_mem_refs (*mem_refs);
- *mem_refs = NULL;
- free (queue);
- sbitmap_free (seen);
-
- return NULL;
}
-/* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
+/* Determines the name for the temporary variable that replaces REF.
+ The name is accumulated in the lsm_tmp_name variable, and N is
+ appended to the name of the temporary. */
-static void
-rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
+char *
+get_lsm_tmp_name (tree ref, unsigned n)
{
- tree var;
- ssa_op_iter iter;
+ char ns[2];
- for (; mem_refs; mem_refs = mem_refs->next)
+ lsm_tmp_name_length = 0;
+ gen_lsm_tmp_name (ref);
+ lsm_tmp_name_add ("_lsm");
+ if (n < 10)
{
- FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter,
- (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE))
- {
- var = SSA_NAME_VAR (var);
- bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
- }
-
- *mem_refs->ref = tmp_var;
- modify_stmt (mem_refs->stmt);
+ ns[0] = '0' + n;
+ ns[1] = 0;
+ lsm_tmp_name_add (ns);
}
+ return lsm_tmp_name;
}
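+
+/* For instance, a reference a.fld[i] yields the name "a_fld_I_lsm0"
+ for N == 0; for N >= 10 (e.g. the ~0 passed from schedule_sm), no
+ digit is appended. */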
/* Records request for store motion of memory reference REF from LOOP.
MEM_REFS is the list of occurrences of the reference REF inside LOOP;
these references are rewritten by a new temporary variable.
- Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
- The initialization of the temporary variable is put to the preheader
- of the loop, and assignments to the reference from the temporary variable
- are emitted to exits. */
+ Exits from the LOOP are stored in EXITS. The initialization of the
+ temporary variable is put in the preheader of the loop, and assignments
+ to the reference from the temporary variable are emitted on the exits. */
static void
-schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
- struct mem_ref *mem_refs)
+schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
+ struct mem_ref_loc *mem_refs)
{
- struct mem_ref *aref;
+ struct mem_ref_loc *aref;
tree tmp_var;
unsigned i;
tree load, store;
struct fmt_data fmt_data;
+ edge ex;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " from loop %d\n", loop->num);
}
- tmp_var = make_rename_temp (TREE_TYPE (ref), "lsm_tmp");
+ tmp_var = make_rename_temp (TREE_TYPE (ref),
+ get_lsm_tmp_name (ref, ~0));
fmt_data.loop = loop;
fmt_data.orig_loop = loop;
LIM_DATA (aref->stmt)->sm_done = true;
/* Emit the load & stores. */
- load = build (MODIFY_EXPR, void_type_node, tmp_var, ref);
+ load = build_gimple_modify_stmt (tmp_var, ref);
get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
LIM_DATA (load)->max_loop = loop;
LIM_DATA (load)->tgt_loop = loop;
all dependencies. */
bsi_insert_on_edge (loop_latch_edge (loop), load);
- for (i = 0; i < n_exits; i++)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
{
- store = build (MODIFY_EXPR, void_type_node,
- unshare_expr (ref), tmp_var);
- bsi_insert_on_edge (exits[i], store);
+ store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
+ bsi_insert_on_edge (ex, store);
}
}
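+
+/* For example, the net effect of store motion on a reference *p is
+
+ while (cond)
+ *p = f (*p);
+
+ becoming
+
+ tmp = *p;
+ while (cond)
+ tmp = f (tmp);
+ *p = tmp; <-- emitted on each exit edge
+
+ (the load is inserted on the latch edge and hoisted from the loop by
+ move_computations, since it is marked invariant in it). */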
-/* Returns true if REF may be clobbered by calls. */
-
-static bool
-is_call_clobbered_ref (tree ref)
-{
- tree base;
-
- base = get_base_address (ref);
- if (!base)
- return true;
-
- if (DECL_P (base))
- return is_call_clobbered (base);
-
- if (TREE_CODE (base) == INDIRECT_REF
- || TREE_CODE (base) == ALIGN_INDIRECT_REF
- || TREE_CODE (base) == MISALIGNED_INDIRECT_REF)
- {
- /* Check whether the alias tags associated with the pointer
- are call clobbered. */
- tree ptr = TREE_OPERAND (base, 0);
- struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
- tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
- tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
-
- if ((nmt && is_call_clobbered (nmt))
- || (tmt && is_call_clobbered (tmt)))
- return true;
-
- return false;
- }
-
- gcc_unreachable ();
-}
-
-/* Determine whether all memory references inside LOOP corresponding to the
- virtual ssa name REG are equal to each other, and whether the address of
- this common reference can be hoisted outside of the loop. If this is true,
- prepare the statements that load the value of the memory reference to a
- temporary variable in the loop preheader, store it back on the loop exits,
- and replace all the references inside LOOP by this temporary variable.
- LOOP has N_EXITS stored in EXITS. */
+/* Check whether memory reference REF can be hoisted out of the LOOP. If this
+ is true, prepare the statements that load the value of the memory reference
+ to a temporary variable in the loop preheader, store it back on the loop
+ exits, and replace all the references inside LOOP by this temporary variable.
+ EXITS is the list of exits of LOOP. CLOBBERED_VOPS is the bitmap of virtual
+ operands that are clobbered by a call or accessed through multiple references
+ in the loop. */
static void
-determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
+determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
+ bitmap clobbered_vops, struct mem_ref *ref)
{
- tree ref;
- struct mem_ref *mem_refs, *aref;
+ struct mem_ref_loc *aref;
struct loop *must_exec;
- bool sees_call;
-
- if (is_gimple_reg (reg))
- return;
-
- ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs,
- &sees_call);
- if (!ref)
- return;
-
- /* If we cannot create a ssa name for the result, give up. */
- if (!is_gimple_reg_type (TREE_TYPE (ref))
- || TREE_THIS_VOLATILE (ref))
- goto fail;
- /* If there is a call that may use the location, give up as well. */
- if (sees_call
- && is_call_clobbered_ref (ref))
- goto fail;
+ /* In case the memory is not stored to, there is nothing for SM to do. */
+ if (!ref->is_stored)
+ return;
- if (!for_each_index (&ref, may_move_till, loop))
- goto fail;
+ /* If the reference is aliased with any other reference, or clobbered
+ by a call in the function, then fail. */
+ if (bitmap_intersect_p (ref->vops, clobbered_vops))
+ return;
- if (tree_could_trap_p (ref))
+ if (tree_could_trap_p (ref->mem))
{
/* If the memory access is unsafe (i.e. it might trap), ensure that one
of the statements in which it occurs is always executed when the loop
least one of the statements containing the memory reference is
executed. */
- for (aref = mem_refs; aref; aref = aref->next)
+ for (aref = ref->locs; aref; aref = aref->next)
{
if (!LIM_DATA (aref->stmt))
continue;
}
if (!aref)
- goto fail;
+ return;
}
- schedule_sm (loop, exits, n_exits, ref, mem_refs);
+ schedule_sm (loop, exits, ref->mem, ref->locs);
+}
-fail: ;
- free_mem_refs (mem_refs);
+/* Hoists memory references MEM_REFS out of LOOP. CLOBBERED_VOPS is the list
+ of vops clobbered by a call in the loop or accessed by multiple memory
+ references. EXITS is the list of exit edges of the LOOP. */
+
+static void
+hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
+ bitmap clobbered_vops, VEC (edge, heap) *exits)
+{
+ struct mem_ref *ref;
+
+ for (ref = mem_refs; ref; ref = ref->next)
+ determine_lsm_ref (loop, exits, clobbered_vops, ref);
}
-/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
+/* Checks whether LOOP (with exits stored in the vector EXITS) is suitable
for a store motion optimization (i.e. whether we can insert statements
on its exits). */
static bool
-loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
- unsigned n_exits)
+loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
+ VEC (edge, heap) *exits)
{
unsigned i;
+ edge ex;
- for (i = 0; i < n_exits; i++)
- if (exits[i]->flags & EDGE_ABNORMAL)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
+ if (ex->flags & EDGE_ABNORMAL)
return false;
return true;
}
+/* A hash function for struct mem_ref object OBJ. */
+
+static hashval_t
+memref_hash (const void *obj)
+{
+ const struct mem_ref *mem = obj;
+
+ return mem->hash;
+}
+
+/* An equality function comparing struct mem_ref object OBJ1 with
+ the memory reference tree OBJ2. */
+
+static int
+memref_eq (const void *obj1, const void *obj2)
+{
+ const struct mem_ref *mem1 = obj1;
+
+ return operand_equal_p (mem1->mem, (tree) obj2, 0);
+}
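+
+/* Note the asymmetry: the table is queried with the tree itself as
+ the key, as in
+
+ slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);
+
+ so memref_eq compares a table entry with a tree, not with another
+ struct mem_ref. */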
+
+/* Gathers memory references in statement STMT in LOOP, storing the
+ information about them in the MEM_REFS hash table. Notes vops accessed
+ through unrecognized statements in CLOBBERED_VOPS. The newly created
+ references are also stored to MEM_REF_LIST. */
+
+static void
+gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
+ bitmap clobbered_vops, tree stmt,
+ struct mem_ref **mem_ref_list)
+{
+ tree *lhs, *rhs, *mem = NULL;
+ hashval_t hash;
+ PTR *slot;
+ struct mem_ref *ref = NULL;
+ ssa_op_iter oi;
+ tree vname;
+ bool is_stored;
+
+ if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+ return;
+
+ /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+ goto fail;
+
+ lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
+
+ if (TREE_CODE (*lhs) == SSA_NAME)
+ {
+ if (!is_gimple_addressable (*rhs))
+ goto fail;
+
+ mem = rhs;
+ is_stored = false;
+ }
+ else if (TREE_CODE (*rhs) == SSA_NAME
+ || is_gimple_min_invariant (*rhs))
+ {
+ mem = lhs;
+ is_stored = true;
+ }
+ else
+ goto fail;
+
+ /* If we cannot create an SSA name for the result, give up. */
+ if (!is_gimple_reg_type (TREE_TYPE (*mem))
+ || TREE_THIS_VOLATILE (*mem))
+ goto fail;
+
+ /* If we cannot move the reference out of the loop, fail. */
+ if (!for_each_index (mem, may_move_till, loop))
+ goto fail;
+
+ hash = iterative_hash_expr (*mem, 0);
+ slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);
+
+ if (*slot)
+ ref = *slot;
+ else
+ {
+ ref = XNEW (struct mem_ref);
+ ref->mem = *mem;
+ ref->hash = hash;
+ ref->locs = NULL;
+ ref->is_stored = false;
+ ref->vops = BITMAP_ALLOC (NULL);
+ ref->next = *mem_ref_list;
+ *mem_ref_list = ref;
+ *slot = ref;
+ }
+ ref->is_stored |= is_stored;
+
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
+ record_mem_ref_loc (&ref->locs, stmt, mem);
+ return;
+
+fail:
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
+}
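+
+/* For instance, a[i] = x_1, x_2 = a[i] and a[i] = 42 are recognized
+ above, while a[i] = b[j] is not (neither side is an SSA name or an
+ invariant), so the vops of the latter go to CLOBBERED_VOPS. */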
+
+/* Gathers memory references in LOOP. Notes vops accessed through unrecognized
+ statements in CLOBBERED_VOPS. The list of the references found by
+ the function is returned. */
+
+static struct mem_ref *
+gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
+{
+ basic_block *body = get_loop_body (loop);
+ block_stmt_iterator bsi;
+ unsigned i;
+ struct mem_ref *mem_ref_list = NULL;
+ htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);
+
+ for (i = 0; i < loop->num_nodes; i++)
+ {
+ for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
+ gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
+ &mem_ref_list);
+ }
+
+ free (body);
+
+ htab_delete (mem_refs);
+ return mem_ref_list;
+}
+
+/* Finds the vops accessed by more than one of the memory references described
+ in MEM_REFS and marks them in CLOBBERED_VOPS. */
+
+static void
+find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
+{
+ bitmap_head tmp, all_vops;
+ struct mem_ref *ref;
+
+ bitmap_initialize (&tmp, &bitmap_default_obstack);
+ bitmap_initialize (&all_vops, &bitmap_default_obstack);
+
+ for (ref = mem_refs; ref; ref = ref->next)
+ {
+ /* The vops that are already in all_vops are accessed by more than
+ one memory reference. */
+ bitmap_and (&tmp, &all_vops, ref->vops);
+ bitmap_ior_into (clobbered_vops, &tmp);
+ bitmap_clear (&tmp);
+
+ bitmap_ior_into (&all_vops, ref->vops);
+ }
+
+ bitmap_clear (&all_vops);
+}
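+
+/* E.g. given R1 with vops {v1, v2} and R2 with vops {v2, v3}, only v2
+ ends up in CLOBBERED_VOPS: when R2 is processed, all_vops already
+ contains v2 from R1. */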
+
+/* Releases the memory occupied by REF. */
+
+static void
+free_mem_ref (struct mem_ref *ref)
+{
+ free_mem_ref_locs (ref->locs);
+ BITMAP_FREE (ref->vops);
+ free (ref);
+}
+
+/* Releases the memory occupied by REFS. */
+
+static void
+free_mem_refs (struct mem_ref *refs)
+{
+ struct mem_ref *ref, *next;
+
+ for (ref = refs; ref; ref = next)
+ {
+ next = ref->next;
+ free_mem_ref (ref);
+ }
+}
+
/* Try to perform store motion for all memory references modified inside
LOOP. */
static void
determine_lsm_loop (struct loop *loop)
{
- tree phi;
- unsigned n_exits;
- edge *exits = get_loop_exit_edges (loop, &n_exits);
+ VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ bitmap clobbered_vops;
+ struct mem_ref *mem_refs;
- if (!loop_suitable_for_sm (loop, exits, n_exits))
+ if (!loop_suitable_for_sm (loop, exits))
{
- free (exits);
+ VEC_free (edge, heap, exits);
return;
}
- for (phi = phi_nodes (loop->header); phi; phi = TREE_CHAIN (phi))
- determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));
+ /* Find the memory references in LOOP. */
+ clobbered_vops = BITMAP_ALLOC (NULL);
+ mem_refs = gather_mem_refs (loop, clobbered_vops);
+
+ /* Find the vops that are used for more than one reference. */
+ find_more_ref_vops (mem_refs, clobbered_vops);
- free (exits);
+ /* Hoist all suitable memory references. */
+ hoist_memory_references (loop, mem_refs, clobbered_vops, exits);
+
+ free_mem_refs (mem_refs);
+ VEC_free (edge, heap, exits);
+ BITMAP_FREE (clobbered_vops);
}
/* Try to perform store motion for all memory references modified inside
- any of LOOPS. */
+ loops. */
static void
-determine_lsm (struct loops *loops)
+determine_lsm (void)
{
struct loop *loop;
- basic_block bb;
+ loop_iterator li;
- /* Create a UID for each statement in the function. Ordering of the
- UIDs is not important for this pass. */
- max_stmt_uid = 0;
- FOR_EACH_BB (bb)
- {
- block_stmt_iterator bsi;
+ /* Process the loops from the outermost inwards, performing store
+ motion where suitable. */
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- stmt_ann (bsi_stmt (bsi))->uid = max_stmt_uid++;
- }
-
- compute_immediate_uses (TDFA_USE_VOPS, NULL);
-
- /* Pass the loops from the outermost. For each virtual operand loop phi node
- check whether all the references inside the loop correspond to a single
- address, and if so, move them. */
-
- loop = loops->tree_root->inner;
- while (1)
+ FOR_EACH_LOOP (li, loop, 0)
{
determine_lsm_loop (loop);
-
- if (loop->inner)
- {
- loop = loop->inner;
- continue;
- }
- while (!loop->next)
- {
- loop = loop->outer;
- if (loop == loops->tree_root)
- {
- free_df ();
- loop_commit_inserts ();
- return;
- }
- }
- loop = loop->next;
}
+
+ bsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
fill_always_executed_in (loop, contains_call);
}
-/* Compute the global information needed by the loop invariant motion pass.
- LOOPS is the loop tree. */
+/* Compute the global information needed by the loop invariant motion pass. */
static void
-tree_ssa_lim_initialize (struct loops *loops)
+tree_ssa_lim_initialize (void)
{
sbitmap contains_call = sbitmap_alloc (last_basic_block);
block_stmt_iterator bsi;
SET_BIT (contains_call, bb->index);
}
- for (loop = loops->tree_root->inner; loop; loop = loop->next)
+ for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
fill_always_executed_in (loop, contains_call);
sbitmap_free (contains_call);
}
}
-/* Moves invariants from LOOPS. Only "expensive" invariants are moved out --
+/* Moves invariants from loops. Only "expensive" invariants are moved out --
i.e. those that are likely to be a win regardless of the register pressure. */
void
-tree_ssa_lim (struct loops *loops)
+tree_ssa_lim (void)
{
- tree_ssa_lim_initialize (loops);
+ tree_ssa_lim_initialize ();
/* For each statement determine the outermost loop in which it is
invariant, and the cost of computing the invariant. */
/* For each memory reference determine whether it is possible to hoist it
out of the loop. Force the necessary invariants to be moved out of the
loops as well. */
- determine_lsm (loops);
+ determine_lsm ();
/* Move the expressions that are expensive enough. */
move_computations ();