/* Loop invariant motion.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
#include "config.h"
#include "system.h"
#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
? NULL \
- : (struct lim_aux_data *) (stmt_ann (STMT)->aux))
+ : (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
/* Description of a memory reference location for store motion. */
struct mem_ref_loc *locs; /* The locations where it is found. */
bitmap vops; /* Vops corresponding to this memory
location. */
+ struct mem_ref *next; /* Next memory reference in the list.
+ Memory references are stored in a hash
+ table, but the hash function depends on
+ the values of pointers. Thus we cannot
+ use htab_traverse, since the traversal
+ order would then differ between runs and
+ cause miscompares during bootstrap
+ (although the produced code would be
+ correct). */
};
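+
+/* An illustrative sketch (not part of the patch): with the NEXT field,
+ clients can walk the references in a deterministic insertion order, e.g.
+
+ struct mem_ref *ref;
+ for (ref = mem_ref_list; ref; ref = ref->next)
+ process_ref (ref);
+
+ where mem_ref_list and process_ref are hypothetical, whereas htab_traverse
+ would visit the table in an order depending on the hashed pointer values,
+ which may differ between bootstrap stages. */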
/* Minimum cost of an expensive expression. */
case BIT_FIELD_REF:
case VIEW_CONVERT_EXPR:
- case ARRAY_RANGE_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
nxt = &TREE_OPERAND (*addr_p, 0);
break;
case ARRAY_REF:
+ case ARRAY_RANGE_REF:
nxt = &TREE_OPERAND (*addr_p, 0);
if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
return false;
case PARM_DECL:
case STRING_CST:
case RESULT_DECL:
+ case VECTOR_CST:
+ case COMPLEX_CST:
+ case INTEGER_CST:
+ case REAL_CST:
+ return true;
+
+ case TARGET_MEM_REF:
+ idx = &TMR_BASE (*addr_p);
+ if (*idx
+ && !cbck (*addr_p, idx, data))
+ return false;
+ idx = &TMR_INDEX (*addr_p);
+ if (*idx
+ && !cbck (*addr_p, idx, data))
+ return false;
return true;
default:
return MOVE_POSSIBLE;
}
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return MOVE_IMPOSSIBLE;
if (stmt_ends_bb_p (stmt))
if (stmt_ann (stmt)->has_volatile_ops)
return MOVE_IMPOSSIBLE;
- lhs = TREE_OPERAND (stmt, 0);
+ lhs = GIMPLE_STMT_OPERAND (stmt, 0);
if (TREE_CODE (lhs) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
return MOVE_IMPOSSIBLE;
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_SIDE_EFFECTS (rhs))
return MOVE_IMPOSSIBLE;
if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
max_loop = find_common_loop (max_loop,
- LIM_DATA (def_stmt)->max_loop->outer);
+ loop_outer (LIM_DATA (def_stmt)->max_loop));
if (max_loop == loop)
return NULL;
- max_loop = superloop_at_depth (loop, max_loop->depth + 1);
+ max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
return max_loop;
}
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return NULL;
- nops = TREE_CODE_LENGTH (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
{
aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
&& def_bb->loop_father == loop)
data->cost += LIM_DATA (def_stmt)->cost;
- dep = xmalloc (sizeof (struct depend));
+ dep = XNEW (struct depend);
dep->stmt = def_stmt;
dep->next = data->depends;
data->depends = dep;
if (TREE_CODE (stmt) == COND_EXPR)
return LIM_EXPENSIVE;
- rhs = TREE_OPERAND (stmt, 1);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
/* Hoisting memory references out should almost surely be a win. */
if (stmt_references_memory_p (stmt))
cost += 20;
break;
+ case LSHIFT_EXPR:
+ case RSHIFT_EXPR:
+ cost += 20;
+ break;
+
default:
break;
}
if (!add_dependency (val, lim_data, loop, true))
return false;
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
+ FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
if (!add_dependency (val, lim_data, loop, false))
return false;
stmt_loop = find_common_loop (orig_loop, stmt_loop);
if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
stmt_loop = find_common_loop (stmt_loop,
- LIM_DATA (stmt)->tgt_loop->outer);
+ loop_outer (LIM_DATA (stmt)->tgt_loop));
if (flow_loop_nested_p (stmt_loop, level))
return;
free (data);
}
+/* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
+
+static tree
+rewrite_reciprocal (block_stmt_iterator *bsi)
+{
+ tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;
+
+ stmt = bsi_stmt (*bsi);
+ lhs = GENERIC_TREE_OPERAND (stmt, 0);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
+
+ /* stmt must be GIMPLE_MODIFY_STMT. */
+ var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
+ add_referenced_var (var);
+
+ tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
+ build_real (TREE_TYPE (rhs), dconst1),
+ TREE_OPERAND (rhs, 1));
+ stmt1 = build_gimple_modify_stmt (var, tmp);
+ name = make_ssa_name (var, stmt1);
+ GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+ tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
+ name, TREE_OPERAND (rhs, 0));
+ stmt2 = build_gimple_modify_stmt (lhs, tmp);
+
+ /* Replace division stmt with reciprocal and multiply stmts.
+ The multiply stmt is not invariant, so update iterator
+ and avoid rescanning. */
+ bsi_replace (bsi, stmt1, true);
+ bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
+ SSA_NAME_DEF_STMT (lhs) = stmt2;
+
+ /* Continue processing with invariant reciprocal statement. */
+ return stmt1;
+}
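+
+/* An illustrative sketch of the transformation, assuming b is invariant
+ in the loop:
+
+ for (i = 0; i < n; i++)
+ x[i] = a[i] / b;
+
+ becomes
+
+ reciptmp = 1.0 / b; (invariant, hoisted later by the pass)
+ for (i = 0; i < n; i++)
+ x[i] = a[i] * reciptmp;
+
+ trading the division inside the loop for a multiplication. */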
+
+/* Check if the pattern at *BSI is a bittest of the form
+ (A >> B) & 1 != 0, and if so rewrite it to A & (1 << B) != 0. */
+
+static tree
+rewrite_bittest (block_stmt_iterator *bsi)
+{
+ tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
+ use_operand_p use;
+
+ stmt = bsi_stmt (*bsi);
+ lhs = GENERIC_TREE_OPERAND (stmt, 0);
+ rhs = GENERIC_TREE_OPERAND (stmt, 1);
+
+ /* Verify that the single use of lhs is a comparison against zero. */
+ if (TREE_CODE (lhs) != SSA_NAME
+ || !single_imm_use (lhs, &use, &use_stmt)
+ || TREE_CODE (use_stmt) != COND_EXPR)
+ return stmt;
+ t = COND_EXPR_COND (use_stmt);
+ if (TREE_OPERAND (t, 0) != lhs
+ || (TREE_CODE (t) != NE_EXPR
+ && TREE_CODE (t) != EQ_EXPR)
+ || !integer_zerop (TREE_OPERAND (t, 1)))
+ return stmt;
+
+ /* Get at the operands of the shift. The rhs is TMP1 & 1. */
+ stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
+ if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ return stmt;
+
+ /* There is a conversion in between, possibly inserted by fold. */
+ t = GIMPLE_STMT_OPERAND (stmt1, 1);
+ if (TREE_CODE (t) == NOP_EXPR
+ || TREE_CODE (t) == CONVERT_EXPR)
+ {
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) != SSA_NAME
+ || !has_single_use (t))
+ return stmt;
+ stmt1 = SSA_NAME_DEF_STMT (t);
+ if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+ return stmt;
+ t = GIMPLE_STMT_OPERAND (stmt1, 1);
+ }
+
+ /* Verify that B is loop invariant but A is not. Verify that with
+ all the stmt walking we are still in the same loop. */
+ if (TREE_CODE (t) == RSHIFT_EXPR
+ && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
+ && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
+ loop_containing_stmt (stmt1)) != NULL
+ && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
+ loop_containing_stmt (stmt1)) == NULL)
+ {
+ tree a = TREE_OPERAND (t, 0);
+ tree b = TREE_OPERAND (t, 1);
+
+ /* 1 << B */
+ var = create_tmp_var (TREE_TYPE (a), "shifttmp");
+ add_referenced_var (var);
+ t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
+ build_int_cst (TREE_TYPE (a), 1), b);
+ stmt1 = build_gimple_modify_stmt (var, t);
+ name = make_ssa_name (var, stmt1);
+ GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+
+ /* A & (1 << B) */
+ t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
+ stmt2 = build_gimple_modify_stmt (var, t);
+ name = make_ssa_name (var, stmt2);
+ GIMPLE_STMT_OPERAND (stmt2, 0) = name;
+ SET_USE (use, name);
+
+ bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
+ bsi_replace (bsi, stmt2, true);
+
+ return stmt1;
+ }
+
+ return stmt;
+}
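+
+/* An illustrative sketch of the transformation, assuming b is invariant
+ in the loop and a is not:
+
+ while (...)
+ if ((a >> b) & 1)
+ ...
+
+ becomes
+
+ shifttmp = 1 << b; (invariant, hoisted later by the pass)
+ while (...)
+ if (a & shifttmp)
+ ...
+
+ so the shift is computed once instead of on every iteration. */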
+
+
/* Determine the outermost loops in which statements in basic block BB are
invariant, and record them in the LIM_DATA associated with the statements.
Callback for walk_dominator_tree. */
bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
- if (!bb->loop_father->outer)
+ if (!loop_outer (bb->loop_father))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
- bb->index, bb->loop_father->num, bb->loop_father->depth);
+ bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
continue;
}
- /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
- to be hoisted out of loop, saving expensive divide. */
- if (pos == MOVE_POSSIBLE
- && (rhs = TREE_OPERAND (stmt, 1)) != NULL
- && TREE_CODE (rhs) == RDIV_EXPR
- && flag_unsafe_math_optimizations
- && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
- loop_containing_stmt (stmt)) != NULL
- && outermost_invariant_loop_expr (rhs,
- loop_containing_stmt (stmt)) == NULL)
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
- tree lhs, stmt1, stmt2, var, name;
-
- lhs = TREE_OPERAND (stmt, 0);
-
- /* stmt must be MODIFY_EXPR. */
- var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
- add_referenced_tmp_var (var);
-
- stmt1 = build2 (MODIFY_EXPR, void_type_node, var,
- build2 (RDIV_EXPR, TREE_TYPE (rhs),
- build_real (TREE_TYPE (rhs), dconst1),
- TREE_OPERAND (rhs, 1)));
- name = make_ssa_name (var, stmt1);
- TREE_OPERAND (stmt1, 0) = name;
- stmt2 = build2 (MODIFY_EXPR, void_type_node, lhs,
- build2 (MULT_EXPR, TREE_TYPE (rhs),
- name, TREE_OPERAND (rhs, 0)));
-
- /* Replace division stmt with reciprocal and multiply stmts.
- The multiply stmt is not invariant, so update iterator
- and avoid rescanning. */
- bsi_replace (&bsi, stmt1, true);
- bsi_insert_after (&bsi, stmt2, BSI_NEW_STMT);
- SSA_NAME_DEF_STMT (lhs) = stmt2;
-
- /* Continue processing with invariant reciprocal statment. */
- stmt = stmt1;
+ rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+
+ /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
+ to be hoisted out of loop, saving expensive divide. */
+ if (pos == MOVE_POSSIBLE
+ && TREE_CODE (rhs) == RDIV_EXPR
+ && flag_unsafe_math_optimizations
+ && !flag_trapping_math
+ && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
+ loop_containing_stmt (stmt)) != NULL
+ && outermost_invariant_loop_expr (rhs,
+ loop_containing_stmt (stmt)) == NULL)
+ stmt = rewrite_reciprocal (&bsi);
+
+ /* If the shift count is invariant, convert (A >> B) & 1 to
+ A & (1 << B), allowing the bit mask to be hoisted out of the loop
+ and saving an expensive shift. */
+ if (pos == MOVE_POSSIBLE
+ && TREE_CODE (rhs) == BIT_AND_EXPR
+ && integer_onep (TREE_OPERAND (rhs, 1))
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
+ && has_single_use (TREE_OPERAND (rhs, 0)))
+ stmt = rewrite_bittest (&bsi);
}
- stmt_ann (stmt)->aux = xcalloc (1, sizeof (struct lim_aux_data));
+ stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
LIM_DATA (stmt)->always_executed_in = outermost;
if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
{
print_generic_stmt_indented (dump_file, stmt, 0, 2);
fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
- LIM_DATA (stmt)->max_loop->depth,
+ loop_depth (LIM_DATA (stmt)->max_loop),
LIM_DATA (stmt)->cost);
}
struct dom_walk_data walk_data;
memset (&walk_data, 0, sizeof (struct dom_walk_data));
+ walk_data.dom_direction = CDI_DOMINATORS;
walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
init_walk_dominator_tree (&walk_data);
fini_walk_dominator_tree (&walk_data);
}
-/* Commits edge insertions and updates loop structures. */
-
-void
-loop_commit_inserts (void)
-{
- unsigned old_last_basic_block, i;
- basic_block bb;
-
- old_last_basic_block = last_basic_block;
- bsi_commit_edge_inserts ();
- for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
- {
- bb = BASIC_BLOCK (i);
- add_bb_to_loop (bb,
- find_common_loop (single_pred (bb)->loop_father,
- single_succ (bb)->loop_father));
- }
-}
-
/* Hoist the statements in basic block BB out of the loops prescribed by
data stored in LIM_DATA structures associated with each statement. Callback
for walk_dominator_tree. */
tree stmt;
unsigned cost = 0;
- if (!bb->loop_father->outer)
+ if (!loop_outer (bb->loop_father))
return;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
cost = LIM_DATA (stmt)->cost;
level = LIM_DATA (stmt)->tgt_loop;
free_lim_aux_data (LIM_DATA (stmt));
- stmt_ann (stmt)->aux = NULL;
+ stmt_ann (stmt)->common.aux = NULL;
if (!level)
{
cost, level->num);
}
bsi_insert_on_edge (loop_preheader_edge (level), stmt);
- bsi_remove (&bsi);
+ bsi_remove (&bsi, false);
}
}
struct dom_walk_data walk_data;
memset (&walk_data, 0, sizeof (struct dom_walk_data));
+ walk_data.dom_direction = CDI_DOMINATORS;
walk_data.before_dom_children_before_stmts = move_computations_stmt;
init_walk_dominator_tree (&walk_data);
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
fini_walk_dominator_tree (&walk_data);
- loop_commit_inserts ();
+ bsi_commit_edge_inserts ();
if (need_ssa_update_p ())
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
if (class != tcc_unary
&& class != tcc_binary
&& class != tcc_expression
+ && class != tcc_vl_exp
&& class != tcc_comparison)
return;
- nops = TREE_CODE_LENGTH (TREE_CODE (expr));
+ nops = TREE_OPERAND_LENGTH (expr);
for (i = 0; i < nops; i++)
force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
static void
record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
{
- struct mem_ref_loc *aref = xmalloc (sizeof (struct mem_ref_loc));
+ struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);
aref->stmt = stmt;
aref->ref = ref;
}
}
+/* The name and the length of the variable currently being generated
+ for lsm. */
+#define MAX_LSM_NAME_LENGTH 40
+static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
+static int lsm_tmp_name_length;
+
+/* Adds S to lsm_tmp_name. */
+
+static void
+lsm_tmp_name_add (const char *s)
+{
+ int l = strlen (s) + lsm_tmp_name_length;
+ if (l > MAX_LSM_NAME_LENGTH)
+ return;
+
+ strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
+ lsm_tmp_name_length = l;
+}
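+
+/* Note that an addition that would overflow MAX_LSM_NAME_LENGTH is silently
+ dropped, so overly long names are truncated rather than rejected. */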
+
+/* Stores the name for the temporary variable that replaces REF into
+ lsm_tmp_name. */
+
+static void
+gen_lsm_tmp_name (tree ref)
+{
+ const char *name;
+
+ switch (TREE_CODE (ref))
+ {
+ case MISALIGNED_INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case INDIRECT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ break;
+
+ case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
+ case ARRAY_RANGE_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ break;
+
+ case REALPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_RE");
+ break;
+
+ case IMAGPART_EXPR:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_IM");
+ break;
+
+ case COMPONENT_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_");
+ name = get_name (TREE_OPERAND (ref, 1));
+ if (!name)
+ name = "F";
+ lsm_tmp_name_add (name);
+ break;
+
+ case ARRAY_REF:
+ gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+ lsm_tmp_name_add ("_I");
+ break;
+
+ case SSA_NAME:
+ ref = SSA_NAME_VAR (ref);
+ /* Fallthru. */
+
+ case VAR_DECL:
+ case PARM_DECL:
+ name = get_name (ref);
+ if (!name)
+ name = "D";
+ lsm_tmp_name_add (name);
+ break;
+
+ case STRING_CST:
+ lsm_tmp_name_add ("S");
+ break;
+
+ case RESULT_DECL:
+ lsm_tmp_name_add ("R");
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
+}
+
+/* Determines the name for the temporary variable that replaces REF.
+ The name is accumulated into the lsm_tmp_name variable.
+ N is appended to the name of the temporary. */
+
+char *
+get_lsm_tmp_name (tree ref, unsigned n)
+{
+ char ns[2];
+
+ lsm_tmp_name_length = 0;
+ gen_lsm_tmp_name (ref);
+ lsm_tmp_name_add ("_lsm");
+ if (n < 10)
+ {
+ ns[0] = '0' + n;
+ ns[1] = 0;
+ lsm_tmp_name_add (ns);
+ }
+ return lsm_tmp_name;
+}
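+
+/* For instance, a reference such as a[i].f would yield a name along the
+ lines of "a_I_f_lsm" (plus a trailing digit for N < 10; the exact
+ spelling depends on the decl names available). */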
+
/* Records request for store motion of memory reference REF from LOOP.
MEM_REFS is the list of occurrences of the reference REF inside LOOP;
these references are rewritten by a new temporary variable.
- Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
- The initialization of the temporary variable is put to the preheader
- of the loop, and assignments to the reference from the temporary variable
- are emitted to exits. */
+ Exits from the LOOP are stored in EXITS. The initialization of the
+ temporary variable is put in the preheader of the loop, and assignments
+ to the reference from the temporary variable are emitted on the exits. */
static void
-schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
+schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
struct mem_ref_loc *mem_refs)
{
struct mem_ref_loc *aref;
unsigned i;
tree load, store;
struct fmt_data fmt_data;
+ edge ex;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " from loop %d\n", loop->num);
}
- tmp_var = make_rename_temp (TREE_TYPE (ref), "lsm_tmp");
+ tmp_var = make_rename_temp (TREE_TYPE (ref),
+ get_lsm_tmp_name (ref, ~0));
fmt_data.loop = loop;
fmt_data.orig_loop = loop;
LIM_DATA (aref->stmt)->sm_done = true;
/* Emit the load & stores. */
- load = build (MODIFY_EXPR, void_type_node, tmp_var, ref);
- get_stmt_ann (load)->aux = xcalloc (1, sizeof (struct lim_aux_data));
+ load = build_gimple_modify_stmt (tmp_var, ref);
+ get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
LIM_DATA (load)->max_loop = loop;
LIM_DATA (load)->tgt_loop = loop;
all dependencies. */
bsi_insert_on_edge (loop_latch_edge (loop), load);
- for (i = 0; i < n_exits; i++)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
{
- store = build (MODIFY_EXPR, void_type_node,
- unshare_expr (ref), tmp_var);
- bsi_insert_on_edge (exits[i], store);
+ store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
+ bsi_insert_on_edge (ex, store);
}
}
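+
+/* A rough sketch of the effect, assuming *p is not clobbered by anything
+ else in the loop:
+
+ for (...)
+ *p = f (*p);
+
+ becomes
+
+ lsm_tmp = *p; (load, scheduled as an invariant and hoisted)
+ for (...)
+ lsm_tmp = f (lsm_tmp); (references rewritten to the temporary)
+ *p = lsm_tmp; (store emitted on each exit edge) */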
is true, prepare the statements that load the value of the memory reference
to a temporary variable in the loop preheader, store it back on the loop
exits, and replace all the references inside LOOP by this temporary variable.
- LOOP has N_EXITS stored in EXITS. CLOBBERED_VOPS is the bitmap of virtual
+ EXITS is the list of exits of LOOP. CLOBBERED_VOPS is the bitmap of virtual
operands that are clobbered by a call or accessed through multiple references
in loop. */
static void
-determine_lsm_ref (struct loop *loop, edge *exits, unsigned n_exits,
+determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
bitmap clobbered_vops, struct mem_ref *ref)
{
struct mem_ref_loc *aref;
return;
}
- schedule_sm (loop, exits, n_exits, ref->mem, ref->locs);
+ schedule_sm (loop, exits, ref->mem, ref->locs);
}
-/* Attempts to hoist memory reference described in SLOT out of loop
- described in DATA. Callback for htab_traverse. */
-
-struct hmr_data
-{
- struct loop *loop; /* Loop from that the reference should be hoisted. */
- edge *exits; /* Exits of the loop. */
- unsigned n_exits; /* Length of the exits array. */
- bitmap clobbered_vops;/* The vops clobbered by call in loop or accessed by
- multiple memory references. */
-};
+/* Hoists memory references MEM_REFS out of LOOP. CLOBBERED_VOPS is the bitmap
+ of vops clobbered by a call in the loop or accessed by multiple memory
+ references. EXITS is the list of exit edges of the LOOP. */
-static int
-hoist_memory_reference (void **slot, void *data)
+static void
+hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
+ bitmap clobbered_vops, VEC (edge, heap) *exits)
{
- struct mem_ref *ref = *slot;
- struct hmr_data *hmr_data = data;
-
- determine_lsm_ref (hmr_data->loop, hmr_data->exits, hmr_data->n_exits,
- hmr_data->clobbered_vops, ref);
+ struct mem_ref *ref;
- return 1;
+ for (ref = mem_refs; ref; ref = ref->next)
+ determine_lsm_ref (loop, exits, clobbered_vops, ref);
}
-/* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
+/* Checks whether LOOP (with exit edges stored in the EXITS vector) is suitable
for a store motion optimization (i.e. whether we can insert statement
on its exits). */
static bool
-loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
- unsigned n_exits)
+loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
+ VEC (edge, heap) *exits)
{
unsigned i;
+ edge ex;
- for (i = 0; i < n_exits; i++)
- if (exits[i]->flags & EDGE_ABNORMAL)
+ for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
+ if (ex->flags & EDGE_ABNORMAL)
return false;
return true;
return operand_equal_p (mem1->mem, (tree) obj2, 0);
}
-/* A function to free the struct mem_ref object OBJ. */
-
-static void
-memref_del (void *obj)
-{
- struct mem_ref *mem = obj;
-
- free_mem_ref_locs (mem->locs);
- BITMAP_FREE (mem->vops);
- free (mem);
-}
-
/* Gathers memory references in statement STMT in LOOP, storing the
information about them in MEM_REFS hash table. Note vops accessed through
- unrecognized statements in CLOBBERED_VOPS. */
+ unrecognized statements in CLOBBERED_VOPS. The newly created references
+ are also added to MEM_REF_LIST. */
static void
gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
- bitmap clobbered_vops, tree stmt)
+ bitmap clobbered_vops, tree stmt,
+ struct mem_ref **mem_ref_list)
{
tree *lhs, *rhs, *mem = NULL;
hashval_t hash;
return;
/* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
goto fail;
- lhs = &TREE_OPERAND (stmt, 0);
- rhs = &TREE_OPERAND (stmt, 1);
+ lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
+ rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_CODE (*lhs) == SSA_NAME)
{
ref = *slot;
else
{
- ref = xmalloc (sizeof (struct mem_ref));
+ ref = XNEW (struct mem_ref);
ref->mem = *mem;
ref->hash = hash;
ref->locs = NULL;
ref->is_stored = false;
ref->vops = BITMAP_ALLOC (NULL);
+ ref->next = *mem_ref_list;
+ *mem_ref_list = ref;
*slot = ref;
}
ref->is_stored |= is_stored;
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
- {
- bitmap_set_bit (ref->vops,
- var_ann (SSA_NAME_VAR (vname))->uid);
- }
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
record_mem_ref_loc (&ref->locs, stmt, mem);
return;
fail:
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
- SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
- {
- bitmap_set_bit (clobbered_vops,
- var_ann (SSA_NAME_VAR (vname))->uid);
- }
+ FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+ bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
-/* Gathers memory references in LOOP, storing the information about them
- in MEM_REFS hash table. Note vops accessed through unrecognized
- statements in CLOBBERED_VOPS. */
+/* Gathers memory references in LOOP. Notes vops accessed through unrecognized
+ statements in CLOBBERED_VOPS. Returns the list of the references found in
+ the loop. */
-static void
-gather_mem_refs (struct loop *loop, htab_t mem_refs, bitmap clobbered_vops)
+static struct mem_ref *
+gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
{
basic_block *body = get_loop_body (loop);
block_stmt_iterator bsi;
unsigned i;
+ struct mem_ref *mem_ref_list = NULL;
+ htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);
for (i = 0; i < loop->num_nodes; i++)
{
for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
- gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi));
+ gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
+ &mem_ref_list);
}
free (body);
+
+ htab_delete (mem_refs);
+ return mem_ref_list;
}
-/* Finds the vops accessed by memory reference described in SLOT as well as
- some other reference(s) and marks them in DATA->clobbered_vops.
- Callback for htab_traverse. */
+/* Finds the vops accessed by more than one of the memory references described
+ in MEM_REFS and marks them in CLOBBERED_VOPS. */
-struct fmrv_data
+static void
+find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
{
- bitmap clobbered_vops; /* The vops clobbered by call in loop or accessed by
- multiple memory references. */
- bitmap all_vops; /* All vops referenced in the loop. */
-};
+ bitmap_head tmp, all_vops;
+ struct mem_ref *ref;
-static int
-find_more_ref_vops (void **slot, void *data)
+ bitmap_initialize (&tmp, &bitmap_default_obstack);
+ bitmap_initialize (&all_vops, &bitmap_default_obstack);
+
+ for (ref = mem_refs; ref; ref = ref->next)
+ {
+ /* The vops that are already in all_vops are accessed by more than
+ one memory reference. */
+ bitmap_and (&tmp, &all_vops, ref->vops);
+ bitmap_ior_into (clobbered_vops, &tmp);
+ bitmap_clear (&tmp);
+
+ bitmap_ior_into (&all_vops, ref->vops);
+ }
+
+ bitmap_clear (&all_vops);
+}
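+
+/* E.g., given references R1 with vops {v1, v2} and R2 with vops {v2, v3},
+ the walk above leaves all_vops = {v1, v2, v3} and adds only v2 to
+ CLOBBERED_VOPS, since it is the only vop accessed by both references. */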
+
+/* Releases the memory occupied by REF. */
+
+static void
+free_mem_ref (struct mem_ref *ref)
{
- struct mem_ref *ref = *slot;
- struct fmrv_data *fmrv_data = data;
- bitmap_head tmp;
+ free_mem_ref_locs (ref->locs);
+ BITMAP_FREE (ref->vops);
+ free (ref);
+}
- /* The vops that are already in all_vops are accessed by more than
- one memory reference. */
- bitmap_initialize (&tmp, &bitmap_default_obstack);
- bitmap_and (&tmp, fmrv_data->all_vops, ref->vops);
- bitmap_ior_into (fmrv_data->clobbered_vops, &tmp);
- bitmap_clear (&tmp);
+/* Releases the memory occupied by REFS. */
+
+static void
+free_mem_refs (struct mem_ref *refs)
+{
+ struct mem_ref *ref, *next;
- bitmap_ior_into (fmrv_data->all_vops, ref->vops);
- return 1;
+ for (ref = refs; ref; ref = next)
+ {
+ next = ref->next;
+ free_mem_ref (ref);
+ }
}
/* Try to perform store motion for all memory references modified inside
static void
determine_lsm_loop (struct loop *loop)
{
- unsigned n_exits;
- edge *exits = get_loop_exit_edges (loop, &n_exits);
- htab_t mem_refs;
- struct hmr_data hmr_data;
- struct fmrv_data fmrv_data;
+ VEC (edge, heap) *exits = get_loop_exit_edges (loop);
bitmap clobbered_vops;
+ struct mem_ref *mem_refs;
- if (!loop_suitable_for_sm (loop, exits, n_exits))
+ if (!loop_suitable_for_sm (loop, exits))
{
- free (exits);
+ VEC_free (edge, heap, exits);
return;
}
- mem_refs = htab_create (100, memref_hash, memref_eq, memref_del);
-
/* Find the memory references in LOOP. */
clobbered_vops = BITMAP_ALLOC (NULL);
- gather_mem_refs (loop, mem_refs, clobbered_vops);
+ mem_refs = gather_mem_refs (loop, clobbered_vops);
/* Find the vops that are used for more than one reference. */
- fmrv_data.all_vops = BITMAP_ALLOC (NULL);
- fmrv_data.clobbered_vops = clobbered_vops;
- htab_traverse (mem_refs, find_more_ref_vops, &fmrv_data);
- BITMAP_FREE (fmrv_data.all_vops);
+ find_more_ref_vops (mem_refs, clobbered_vops);
/* Hoist all suitable memory references. */
- hmr_data.loop = loop;
- hmr_data.exits = exits;
- hmr_data.n_exits = n_exits;
- hmr_data.clobbered_vops = clobbered_vops;
- htab_traverse (mem_refs, hoist_memory_reference, &hmr_data);
+ hoist_memory_references (loop, mem_refs, clobbered_vops, exits);
- htab_delete (mem_refs);
- free (exits);
+ free_mem_refs (mem_refs);
+ VEC_free (edge, heap, exits);
BITMAP_FREE (clobbered_vops);
}
/* Try to perform store motion for all memory references modified inside
- any of LOOPS. */
+ loops. */
static void
-determine_lsm (struct loops *loops)
+determine_lsm (void)
{
struct loop *loop;
-
- if (!loops->tree_root->inner)
- return;
+ loop_iterator li;
/* Pass the loops from the outermost and perform the store motion as
suitable. */
- loop = loops->tree_root->inner;
- while (1)
+ FOR_EACH_LOOP (li, loop, 0)
{
determine_lsm_loop (loop);
-
- if (loop->inner)
- {
- loop = loop->inner;
- continue;
- }
- while (!loop->next)
- {
- loop = loop->outer;
- if (loop == loops->tree_root)
- {
- loop_commit_inserts ();
- return;
- }
- }
- loop = loop->next;
}
+
+ bsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
fill_always_executed_in (loop, contains_call);
}
-/* Compute the global information needed by the loop invariant motion pass.
- LOOPS is the loop tree. */
+/* Compute the global information needed by the loop invariant motion pass. */
static void
-tree_ssa_lim_initialize (struct loops *loops)
+tree_ssa_lim_initialize (void)
{
sbitmap contains_call = sbitmap_alloc (last_basic_block);
block_stmt_iterator bsi;
SET_BIT (contains_call, bb->index);
}
- for (loop = loops->tree_root->inner; loop; loop = loop->next)
+ for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
fill_always_executed_in (loop, contains_call);
sbitmap_free (contains_call);
}
}
-/* Moves invariants from LOOPS. Only "expensive" invariants are moved out --
+/* Moves invariants from loops. Only "expensive" invariants are moved out --
i.e. those that are likely to be a win regardless of the register pressure. */
void
-tree_ssa_lim (struct loops *loops)
+tree_ssa_lim (void)
{
- tree_ssa_lim_initialize (loops);
+ tree_ssa_lim_initialize ();
/* For each statement determine the outermost loop in which it is
invariant and the cost of computing the invariant. */
/* For each memory reference determine whether it is possible to hoist it
out of the loop. Force the necessary invariants to be moved out of the
loops as well. */
- determine_lsm (loops);
+ determine_lsm ();
/* Move the expressions that are expensive enough. */
move_computations ();