/* If we didn't replace the whole stmt (or propagate the result
into all uses), replace all uses on this stmt with their
leaders. */
+ bool modified = false;
use_operand_p use_p;
ssa_op_iter iter;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree use = USE_FROM_PTR (use_p);
tree sprime = eliminate_avail (use);
if (sprime && sprime != use
&& may_propagate_copy (use, sprime)
&& (!inserted_exprs
|| TREE_CODE (sprime) != SSA_NAME
|| !is_gimple_debug (stmt)
|| !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
{
propagate_value (use_p, sprime);
- gimple_set_modified (stmt, true);
+ modified = true;
if (TREE_CODE (sprime) == SSA_NAME
&& !is_gimple_debug (stmt))
gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
NECESSARY, true);
}
}
+ /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
+ into, which is a requirement for the IPA devirt machinery. */
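+ /* For example, propagating &s for p_2 turns MEM[(struct S *)p_2]
+ into MEM[(struct S *)&s], which fold_stmt canonicalizes to a
+ direct reference to s. */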
+ gimple *old_stmt = stmt;
+ if (modified)
+ {
+ /* If a formerly non-invariant ADDR_EXPR was turned into an
+ invariant one it was forced onto a stmt of its own; recompute
+ its invariant flag there. */
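+ /* E.g. &a[i_1] is non-invariant but becomes the invariant &a[2]
+ once the constant leader 2 was propagated for i_1. */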
+ if (gimple_assign_single_p (stmt)
+ && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
+ recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
+ gimple_stmt_iterator prev = gsi;
+ gsi_prev (&prev);
+ if (fold_stmt (&gsi))
+ {
+ /* fold_stmt may have created new stmts in between
+ the previous stmt and the folded stmt. Mark
+ all defs created there as varying to not confuse
+ the SCCVN machinery as we're using that even during
+ elimination. */
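+ /* Such new stmts appear for example when a builtin call is folded
+ into a short sequence of stmts; the defs of those stmts have no
+ VN info yet. */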
+ if (gsi_end_p (prev))
+ prev = gsi_start_bb (b);
+ else
+ gsi_next (&prev);
+ if (gsi_stmt (prev) != gsi_stmt (gsi))
+ do
+ {
+ tree def;
+ ssa_op_iter dit;
+ FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
+ dit, SSA_OP_ALL_DEFS)
+ /* As existing DEFs may move between stmts,
+ we have to guard VN_INFO_GET. */
+ if (! has_VN_INFO (def))
+ VN_INFO_GET (def)->valnum = def;
+ if (gsi_stmt (prev) == gsi_stmt (gsi))
+ break;
+ gsi_next (&prev);
+ }
+ while (1);
+ }
+ stmt = gsi_stmt (gsi);
+ /* In case we folded the stmt away, schedule the NOP for removal. */
+ if (gimple_nop_p (stmt))
+ el_to_remove.safe_push (stmt);
+ }
+
/* Visit indirect calls and turn them into direct calls if
- possible using the devirtualization machinery. */
+ possible using the devirtualization machinery. Do this before
+ checking for required EH/abnormal/noreturn cleanup as devirt
+ may expose more of those. */
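+ /* For example a call devirtualized to __builtin_unreachable below
+ becomes a noreturn call that requires this cleanup. */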
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
tree fn = gimple_call_fn (call_stmt);
if (fn
&& flag_devirtualize
&& virtual_method_call_p (fn))
{
tree otr_type = obj_type_ref_class (fn);
+ unsigned HOST_WIDE_INT otr_tok
+ = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
tree instance;
- ipa_polymorphic_call_context context (current_function_decl, fn, stmt, &instance);
+ ipa_polymorphic_call_context context (current_function_decl,
+ fn, stmt, &instance);
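+ /* get_dynamic_type tries to refine the context by walking back to
+ a point where the dynamic type of the instance is known, for
+ example a dominating constructor call. */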
+ context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
+ otr_type, stmt);
bool final;
-
- context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn), otr_type, stmt);
-
- vec <cgraph_node *>targets
+ vec <cgraph_node *> targets
= possible_polymorphic_call_targets (obj_type_ref_class (fn),
- tree_to_uhwi
- (OBJ_TYPE_REF_TOKEN (fn)),
- context,
- &final);
+ otr_tok, context, &final);
if (dump_file)
dump_possible_polymorphic_call_targets (dump_file,
obj_type_ref_class (fn),
- tree_to_uhwi
- (OBJ_TYPE_REF_TOKEN (fn)),
- context);
+ otr_tok, context);
if (final && targets.length () <= 1 && dbg_cnt (devirt))
{
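+ /* With a complete (final) target list, zero targets means the call
+ is unreachable and a single target can be called directly. */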
tree fn;
if (targets.length () == 1)
fn = targets[0]->decl;
else
fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
if (dump_enabled_p ())
{
- location_t loc = gimple_location_safe (stmt);
+ location_t loc = gimple_location (stmt);
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
lang_hooks.decl_printable_name (fn, 2));
}
gimple_call_set_fndecl (call_stmt, fn);
/* If changing the call to __builtin_unreachable
or similar noreturn function, adjust gimple_call_fntype
too. */
if (gimple_call_noreturn_p (call_stmt)
&& VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
&& TYPE_ARG_TYPES (TREE_TYPE (fn))
&& (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
== void_type_node))
gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
maybe_remove_unused_call_args (cfun, call_stmt);
- gimple_set_modified (stmt, true);
+ modified = true;
}
}
}
- if (gimple_modified_p (stmt))
+ if (modified)
{
- /* If a formerly non-invariant ADDR_EXPR is turned into an
- invariant one it was on a separate stmt. */
- if (gimple_assign_single_p (stmt)
- && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
- recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
- gimple *old_stmt = stmt;
- gimple_stmt_iterator prev = gsi;
- gsi_prev (&prev);
- if (fold_stmt (&gsi))
- {
- /* fold_stmt may have created new stmts inbetween
- the previous stmt and the folded stmt. Mark
- all defs created there as varying to not confuse
- the SCCVN machinery as we're using that even during
- elimination. */
- if (gsi_end_p (prev))
- prev = gsi_start_bb (b);
- else
- gsi_next (&prev);
- if (gsi_stmt (prev) != gsi_stmt (gsi))
- do
- {
- tree def;
- ssa_op_iter dit;
- FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
- dit, SSA_OP_ALL_DEFS)
- /* As existing DEFs may move between stmts
- we have to guard VN_INFO_GET. */
- if (! has_VN_INFO (def))
- VN_INFO_GET (def)->valnum = def;
- if (gsi_stmt (prev) == gsi_stmt (gsi))
- break;
- gsi_next (&prev);
- }
- while (1);
- }
- stmt = gsi_stmt (gsi);
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
if (!was_noreturn