vec<cond_equivalence> cond_equivalences;
};
-/* Unwindable equivalences, both const/copy and expression varieties. */
-static avail_exprs_stack *avail_exprs_stack;
-
/* Track whether or not we have changed the control flow graph. */
static bool cfg_altered;
/* Local functions. */
static void optimize_stmt (basic_block, gimple_stmt_iterator,
- class const_and_copies *);
-static tree lookup_avail_expr (gimple, bool);
-static void record_cond (cond_equivalence *);
+ class const_and_copies *,
+ class avail_exprs_stack *);
+static tree lookup_avail_expr (gimple, bool, class avail_exprs_stack *);
+static void record_cond (cond_equivalence *, class avail_exprs_stack *);
static void record_equality (tree, tree, class const_and_copies *);
static void record_equivalences_from_phis (basic_block);
static void record_equivalences_from_incoming_edge (basic_block,
- class const_and_copies *);
+ class const_and_copies *,
+ class avail_exprs_stack *);
static void eliminate_redundant_computations (gimple_stmt_iterator *,
- class const_and_copies *);
-static void record_equivalences_from_stmt (gimple, int);
+ class const_and_copies *,
+ class avail_exprs_stack *);
+static void record_equivalences_from_stmt (gimple, int,
+ class avail_exprs_stack *);
static edge single_incoming_edge_ignoring_loop_edges (basic_block);
static void dump_dominator_optimization_stats (FILE *file,
hash_table<expr_elt_hasher> *);
{
public:
dom_opt_dom_walker (cdi_direction direction,
- class const_and_copies *const_and_copies)
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack)
: dom_walker (direction),
m_const_and_copies (const_and_copies),
+ m_avail_exprs_stack (avail_exprs_stack),
m_dummy_cond (NULL) {}
virtual void before_dom_children (basic_block);
/* Unwindable equivalences, both const/copy and expression varieties. */
class const_and_copies *m_const_and_copies;
+ class avail_exprs_stack *m_avail_exprs_stack;
gcond *m_dummy_cond;
};
/* Create our hash tables. */
hash_table<expr_elt_hasher> *avail_exprs
= new hash_table<expr_elt_hasher> (1024);
- avail_exprs_stack = new class avail_exprs_stack (avail_exprs);
+ class avail_exprs_stack *avail_exprs_stack
+ = new class avail_exprs_stack (avail_exprs);
class const_and_copies *const_and_copies = new class const_and_copies ();
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_noreturn_fixup.create (0);
record_edge_info (bb);
/* Recursively walk the dominator tree optimizing statements. */
- dom_opt_dom_walker walker (CDI_DOMINATORS, const_and_copies);
+ dom_opt_dom_walker walker (CDI_DOMINATORS,
+ const_and_copies,
+ avail_exprs_stack);
walker.walk (fun->cfg->x_entry_block_ptr);
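
Since avail_exprs_stack is now a block-scoped local rather than the old
file-scoped global, the pass teardown (not shown in this excerpt) is assumed
to release it together with the other per-pass objects created above,
roughly:

  /* Sketch only; exact placement within the pass finalization is assumed.  */
  delete avail_exprs_stack;
  delete const_and_copies;
  delete avail_exprs;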
{
threading code with a simple API for simplifying statements. */
static tree
simplify_stmt_for_jump_threading (gimple stmt,
- gimple within_stmt ATTRIBUTE_UNUSED)
+ gimple within_stmt ATTRIBUTE_UNUSED,
+ class avail_exprs_stack *avail_exprs_stack)
{
- return lookup_avail_expr (stmt, false);
+ return lookup_avail_expr (stmt, false, avail_exprs_stack);
}
/* Valueize hook for gimple_fold_stmt_to_constant_1. */
return t;
}
-/* Record into the equivalence tables any equivalences implied by
- traversing edge E (which are cached in E->aux).
+/* Record into CONST_AND_COPIES and AVAIL_EXPRS_STACK any equivalences implied
+ by traversing edge E (which are cached in E->aux).
Callers are responsible for managing the unwinding markers. */
static void
record_temporary_equivalences (edge e,
- class const_and_copies *const_and_copies)
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack)
{
int i;
struct edge_info *edge_info = (struct edge_info *) e->aux;
/* If we have 0 = COND or 1 = COND equivalences, record them
into our expression hash tables. */
for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
- record_cond (eq);
+ record_cond (eq, avail_exprs_stack);
}
}
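
The "0 = COND or 1 = COND" equivalences iterated over above are easiest to
see on an example; the illustration below is assumed (the derived conditions
are built elsewhere in the file and are not part of this excerpt):

  /* Taking the true edge of          if (a_1 < b_2)
     caches in e->aux equivalences roughly of the form
         (a_1 <  b_2) == 1
     together with derived forms such as
         (a_1 <= b_2) == 1
     and record_cond enters each one into AVAIL_EXPRS_STACK's hash table,
     so an identical comparison dominated by E folds to the cached value.  */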
/* Push a marker on both stacks so we can unwind the tables back to their
current state. */
- avail_exprs_stack->push_marker ();
+ m_avail_exprs_stack->push_marker ();
m_const_and_copies->push_marker ();
/* Traversing E may result in equivalences we can utilize. */
- record_temporary_equivalences (e, m_const_and_copies);
+ record_temporary_equivalences (e, m_const_and_copies, m_avail_exprs_stack);
/* With all the edge equivalences in the tables, go ahead and attempt
to thread through E->dest. */
::thread_across_edge (m_dummy_cond, e, false,
- m_const_and_copies, avail_exprs_stack,
+ m_const_and_copies, m_avail_exprs_stack,
simplify_stmt_for_jump_threading);
/* And restore the various tables to their state before
XXX The code in tree-ssa-threadedge.c will restore the state of
   the const_and_copies table.  We just have to restore the expression
table. */
- avail_exprs_stack->pop_to_marker ();
+ m_avail_exprs_stack->pop_to_marker ();
}
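
The marker discipline used above (and again in before_dom_children /
after_dom_children below) is the heart of both unwindable tables; a minimal
sketch of the general pattern, using the member names from this patch:

  m_avail_exprs_stack->push_marker ();
  m_const_and_copies->push_marker ();

  /* ... record temporary equivalences and query the tables while
     processing a single edge or dominator-tree block ...  */

  /* Everything recorded since the matching push_marker is discarded,
     so equivalences local to this edge or block never leak into
     sibling blocks.  */
  m_avail_exprs_stack->pop_to_marker ();
  m_const_and_copies->pop_to_marker ();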
/* PHI nodes can create equivalences too.
return retval;
}
-/* Record any equivalences created by the incoming edge to BB. If BB
- has more than one incoming edge, then no equivalence is created. */
+/* Record any equivalences created by the incoming edge to BB into
+ CONST_AND_COPIES and AVAIL_EXPRS_STACK. If BB has more than one
+ incoming edge, then no equivalence is created. */
static void
record_equivalences_from_incoming_edge (basic_block bb,
- class const_and_copies *const_and_copies)
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack)
{
edge e;
basic_block parent;
/* If we had a single incoming edge from our parent block, then enter
any data associated with the edge into our tables. */
if (e && e->src == parent)
- record_temporary_equivalences (e, const_and_copies);
+ record_temporary_equivalences (e, const_and_copies, avail_exprs_stack);
}
/* Dump statistics for the hash table HTAB. */
}
-/* Enter condition equivalence into the expression hash table.
+/* Enter condition equivalence P into the hash table in AVAIL_EXPRS_STACK.
+
This indicates that a conditional expression has a known
boolean value. */
static void
-record_cond (cond_equivalence *p)
+record_cond (cond_equivalence *p,
+ class avail_exprs_stack *avail_exprs_stack)
{
class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
expr_hash_elt **slot;
if (*slot == NULL)
{
*slot = element;
-
avail_exprs_stack->record_expr (element, NULL, '1');
}
else
/* Push a marker on the stacks of local information so that we know how
far to unwind when we finalize this block. */
- avail_exprs_stack->push_marker ();
+ m_avail_exprs_stack->push_marker ();
m_const_and_copies->push_marker ();
- record_equivalences_from_incoming_edge (bb, m_const_and_copies);
+ record_equivalences_from_incoming_edge (bb, m_const_and_copies,
+ m_avail_exprs_stack);
/* PHI nodes can create equivalences too. */
record_equivalences_from_phis (bb);
/* Create equivalences from redundant PHIs. PHIs are only truly
redundant when they exist in the same block, so push another
marker and unwind right afterwards. */
- avail_exprs_stack->push_marker ();
+ m_avail_exprs_stack->push_marker ();
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- eliminate_redundant_computations (&gsi, m_const_and_copies);
- avail_exprs_stack->pop_to_marker ();
+ eliminate_redundant_computations (&gsi, m_const_and_copies,
+ m_avail_exprs_stack);
+ m_avail_exprs_stack->pop_to_marker ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- optimize_stmt (bb, gsi, m_const_and_copies);
+ optimize_stmt (bb, gsi, m_const_and_copies, m_avail_exprs_stack);
/* Now prepare to process dominated blocks. */
record_edge_info (bb);
}
/* These remove expressions local to BB from the tables. */
- avail_exprs_stack->pop_to_marker ();
+ m_avail_exprs_stack->pop_to_marker ();
m_const_and_copies->pop_to_marker ();
}
/* Search for redundant computations in STMT. If any are found, then
replace them with the variable holding the result of the computation.
- If safe, record this expression into the available expression hash
- table. */
+ If safe, record this expression into AVAIL_EXPRS_STACK and
+ CONST_AND_COPIES. */
static void
eliminate_redundant_computations (gimple_stmt_iterator* gsi,
- class const_and_copies *const_and_copies)
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack)
{
tree expr_type;
tree cached_lhs;
insert = false;
/* Check if the expression has been computed before. */
- cached_lhs = lookup_avail_expr (stmt, insert);
+ cached_lhs = lookup_avail_expr (stmt, insert, avail_exprs_stack);
opt_stats.num_exprs_considered++;
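
A hypothetical example of the lookup above (SSA names invented for
illustration):

  /* Given a dominating statement previously entered into the table:

       c_4 = a_1 + b_2;
       ...
       d_7 = a_1 + b_2;   <-- lookup_avail_expr returns c_4

     the second statement can be rewritten to d_7 = c_4, and the resulting
     copy is then handled through CONST_AND_COPIES.  */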
/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
the available expressions table or the const_and_copies table.
- Detect and record those equivalences. */
-/* We handle only very simple copy equivalences here. The heavy
+ Detect and record those equivalences into AVAIL_EXPRS_STACK.
+
+ We handle only very simple copy equivalences here. The heavy
   lifting is done by eliminate_redundant_computations. */
static void
-record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
+record_equivalences_from_stmt (gimple stmt, int may_optimize_p,
+ class avail_exprs_stack *avail_exprs_stack)
{
tree lhs;
enum tree_code lhs_code;
/* Finally enter the statement into the available expression
table. */
- lookup_avail_expr (new_stmt, true);
+ lookup_avail_expr (new_stmt, true, avail_exprs_stack);
}
}
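
The statement entered here is, per the elided code above, the RHS/LHS
exchanged form of a store; an assumed illustration (SSA names invented):

  /* For a store                       *p_5 = x_3;
     new_stmt is the exchanged form    x_3 = *p_5;
     and entering it with insert=true lets a dominated load of *p_5 be
     found in the table and replaced by x_3, while the insert=false lookup
     in optimize_stmt uses the same exchanged form to detect stores of a
     value the location already holds.  */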
cprop_operand (stmt, op_p);
}
-/* Optimize the statement pointed to by iterator SI.
+/* Optimize the statement in block BB pointed to by iterator SI
+ using equivalences from CONST_AND_COPIES and AVAIL_EXPRS_STACK.
We try to perform some simplistic global redundancy elimination and
constant propagation:
static void
optimize_stmt (basic_block bb, gimple_stmt_iterator si,
- class const_and_copies *const_and_copies)
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack)
{
gimple stmt, old_stmt;
bool may_optimize_p;
}
update_stmt_if_modified (stmt);
- eliminate_redundant_computations (&si, const_and_copies);
+ eliminate_redundant_computations (&si, const_and_copies,
+ avail_exprs_stack);
stmt = gsi_stmt (si);
/* Perform simple redundant store elimination. */
else
new_stmt = gimple_build_assign (rhs, lhs);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
- cached_lhs = lookup_avail_expr (new_stmt, false);
+ cached_lhs = lookup_avail_expr (new_stmt, false, avail_exprs_stack);
if (cached_lhs
&& rhs == cached_lhs)
{
/* Record any additional equivalences created by this statement. */
if (is_gimple_assign (stmt))
- record_equivalences_from_stmt (stmt, may_optimize_p);
+ record_equivalences_from_stmt (stmt, may_optimize_p, avail_exprs_stack);
/* If STMT is a COND_EXPR and it was modified, then we may know
where it goes. If that is the case, then mark the CFG as altered.
return NULL;
}
-/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
+/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
If found, return its LHS. Otherwise insert STMT in the table and
return NULL_TREE.
we finish processing this block and its children. */
static tree
-lookup_avail_expr (gimple stmt, bool insert)
+lookup_avail_expr (gimple stmt, bool insert,
+ class avail_exprs_stack *avail_exprs_stack)
{
expr_hash_elt **slot;
tree lhs;
/* Array to record value-handles per SSA_NAME. */
vec<tree> ssa_name_values;
+typedef tree (pfn_simplify) (gimple, gimple, class avail_exprs_stack *);
+
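
pfn_simplify is the shape every pass-specific SIMPLIFY callback must now
have; a purely hypothetical conforming callback (name and body invented for
illustration) would look like:

  static tree
  example_simplify (gimple stmt, gimple within_stmt ATTRIBUTE_UNUSED,
                    class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
  {
    /* Return an SSA_NAME or invariant that STMT is known to compute, or
       NULL_TREE if no simplification is possible.  A pass with no
       expression table simply ignores the third argument, as
       dummy_simplify below does.  */
    return NULL_TREE;
  }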
/* Set the value for the SSA name NAME to VALUE. */
void
static gimple
record_temporary_equivalences_from_stmts_at_dest (edge e,
- const_and_copies *const_and_copies,
- tree (*simplify) (gimple,
- gimple),
- bool backedge_seen)
+ const_and_copies *const_and_copies,
+ avail_exprs_stack *avail_exprs_stack,
+ pfn_simplify simplify,
+ bool backedge_seen)
{
gimple stmt = NULL;
gimple_stmt_iterator gsi;
if (!cached_lhs
|| (TREE_CODE (cached_lhs) != SSA_NAME
&& !is_gimple_min_invariant (cached_lhs)))
- cached_lhs = (*simplify) (stmt, stmt);
+ cached_lhs = (*simplify) (stmt, stmt, avail_exprs_stack);
/* Restore the statement's original uses/defs. */
i = 0;
necessarily valid. We use this callback rather than the ones provided by
DOM/VRP to achieve that effect. */
static tree
-dummy_simplify (gimple stmt1 ATTRIBUTE_UNUSED, gimple stmt2 ATTRIBUTE_UNUSED)
+dummy_simplify (gimple stmt1 ATTRIBUTE_UNUSED, gimple stmt2 ATTRIBUTE_UNUSED,
+ class avail_exprs_stack *avail_exprs_stack ATTRIBUTE_UNUSED)
{
return NULL_TREE;
}
a condition using pass specific information.
Return the simplified condition or NULL if simplification could
- not be performed. */
+ not be performed.
+
+ The available expression table is referenced via AVAIL_EXPRS_STACK. */
static tree
simplify_control_stmt_condition (edge e,
gimple stmt,
+ class avail_exprs_stack *avail_exprs_stack,
gcond *dummy_cond,
- tree (*simplify) (gimple, gimple),
+ pfn_simplify simplify,
bool handle_dominating_asserts)
{
tree cond, cached_lhs;
then use the pass specific callback to simplify the condition. */
if (!cached_lhs
|| !is_gimple_min_invariant (cached_lhs))
- cached_lhs = (*simplify) (dummy_cond, stmt);
+ cached_lhs = (*simplify) (dummy_cond, stmt, avail_exprs_stack);
/* If we were just testing that an integral type was != 0, and that
failed, just return the first operand. This gives the FSM code a
/* If we haven't simplified to an invariant yet, then use the
pass specific callback to try and simplify it further. */
if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
- cached_lhs = (*simplify) (stmt, stmt);
+ cached_lhs = (*simplify) (stmt, stmt, avail_exprs_stack);
/* We couldn't find an invariant. But, callers of this
function may be able to do something useful with the
return false.
DUMMY_COND, HANDLE_DOMINATING_ASSERTS and SIMPLIFY are used to
- try and simplify the condition at the end of TAKEN_EDGE->dest. */
+ try and simplify the condition at the end of TAKEN_EDGE->dest.
+
+ The available expression table is referenced via AVAIL_EXPRS_STACK. */
+
static bool
thread_around_empty_blocks (edge taken_edge,
gcond *dummy_cond,
+ class avail_exprs_stack *avail_exprs_stack,
bool handle_dominating_asserts,
- tree (*simplify) (gimple, gimple),
+ pfn_simplify simplify,
bitmap visited,
vec<jump_thread_edge *> *path,
bool *backedge_seen_p)
simplify = dummy_simplify;
return thread_around_empty_blocks (taken_edge,
dummy_cond,
+ avail_exprs_stack,
handle_dominating_asserts,
simplify,
visited,
simplify = dummy_simplify;
/* Extract and simplify the condition. */
- cond = simplify_control_stmt_condition (taken_edge, stmt, dummy_cond,
+ cond = simplify_control_stmt_condition (taken_edge, stmt,
+ avail_exprs_stack, dummy_cond,
simplify, handle_dominating_asserts);
/* If the condition can be statically computed and we have not already
thread_around_empty_blocks (taken_edge,
dummy_cond,
+ avail_exprs_stack,
handle_dominating_asserts,
simplify,
visited,
gcond *dummy_cond,
bool handle_dominating_asserts,
const_and_copies *const_and_copies,
- tree (*simplify) (gimple, gimple),
+ avail_exprs_stack *avail_exprs_stack,
+ pfn_simplify simplify,
vec<jump_thread_edge *> *path,
bitmap visited,
bool *backedge_seen_p)
temporary equivalences we can detect. */
gimple stmt
= record_temporary_equivalences_from_stmts_at_dest (e, const_and_copies,
+ avail_exprs_stack,
simplify,
*backedge_seen_p);
tree cond;
/* Extract and simplify the condition. */
- cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
+ cond = simplify_control_stmt_condition (e, stmt, avail_exprs_stack,
+ dummy_cond, simplify,
handle_dominating_asserts);
if (!cond)
bitmap_set_bit (visited, e->dest->index);
thread_around_empty_blocks (taken_edge,
dummy_cond,
+ avail_exprs_stack,
handle_dominating_asserts,
simplify,
visited,
the simplified condition with left-hand sides of ASSERT_EXPRs they are
used in.
- STACK is used to undo temporary equivalences created during the walk of
- E->dest.
+ CONST_AND_COPIES is used to undo temporary equivalences created during the
+ walk of E->dest.
+
   The available expression table is referenced via AVAIL_EXPRS_STACK.
SIMPLIFY is a pass-specific function used to simplify statements. */
thread_across_edge (gcond *dummy_cond,
edge e,
bool handle_dominating_asserts,
- const_and_copies *const_and_copies,
- avail_exprs_stack *avail_exprs_stack,
- tree (*simplify) (gimple, gimple))
+ class const_and_copies *const_and_copies,
+ class avail_exprs_stack *avail_exprs_stack,
+ tree (*simplify) (gimple, gimple,
+ class avail_exprs_stack *))
{
bitmap visited = BITMAP_ALLOC (NULL);
bool backedge_seen;
int threaded = thread_through_normal_block (e, dummy_cond,
handle_dominating_asserts,
- const_and_copies, simplify, path,
+ const_and_copies,
+ avail_exprs_stack,
+ simplify, path,
visited, &backedge_seen);
if (threaded > 0)
{
simplify = dummy_simplify;
found = thread_around_empty_blocks (taken_edge,
dummy_cond,
+ avail_exprs_stack,
handle_dominating_asserts,
simplify,
visited,
if (!found)
found = thread_through_normal_block (path->last ()->e, dummy_cond,
handle_dominating_asserts,
- const_and_copies, simplify, path,
+ const_and_copies,
+ avail_exprs_stack,
+ simplify, path,
visited, &backedge_seen) > 0;
/* If we were able to thread through a successor of E->dest, then