+2018-10-22 Martin Jambor <mjambor@suse.cz>
+
+ * tree-eh.h (stmt_could_throw_p): Add function parameter.
+ (stmt_can_throw_external): Likewise.
+ (stmt_can_throw_internal): Likewise.
+ * tree-eh.c (lower_eh_constructs_2): Pass cfun to stmt_could_throw_p
+ (both call sites).
+ (stmt_could_throw_p): Add fun parameter, use it instead of cfun.
+ (stmt_can_throw_external): Likewise.
+ (stmt_can_throw_internal): Likewise.
+ (maybe_clean_eh_stmt_fn): Pass cfun to stmt_could_throw_p.
+ (maybe_clean_or_replace_eh_stmt): Pass cfun to stmt_could_throw_p.
+ (maybe_duplicate_eh_stmt_fn): Pass new_fun to stmt_could_throw_p.
+ (maybe_duplicate_eh_stmt): Pass cfun to stmt_could_throw_p.
+ (pass_lower_eh_dispatch::execute): Pass cfun to
+ stmt_can_throw_external.
+ (cleanup_empty_eh): Likewise.
+ (verify_eh_edges): Pass cfun to stmt_could_throw_p.
+ * cgraph.c (cgraph_edge::set_call_stmt): Pass a function to
+ stmt_can_throw_external instead of pushing it to cfun.
+ (symbol_table::create_edge): Likewise.
+ * gimple-fold.c (fold_builtin_atomic_compare_exchange): Pass cfun to
+ stmt_can_throw_internal.
+ * gimple-ssa-evrp.c (evrp_dom_walker::before_dom_children): Pass cfun
+ to stmt_could_throw_p.
+ * gimple-ssa-store-merging.c (handled_load): Pass cfun to
+ stmt_can_throw_internal.
+ (pass_store_merging::execute): Likewise.
+ * gimple-ssa-strength-reduction.c
+ (find_candidates_dom_walker::before_dom_children): Pass cfun to
+ stmt_could_throw_p.
+ * gimplify-me.c (gimple_regimplify_operands): Pass cfun to
+ stmt_can_throw_internal.
+ * ipa-pure-const.c (check_call): Pass cfun to stmt_could_throw_p and
+ to stmt_can_throw_external.
+ (check_stmt): Pass cfun to stmt_could_throw_p and to
+ stmt_can_throw_external.
+ (pass_nothrow::execute): Likewise.
+ * trans-mem.c (expand_call_tm): Pass cfun to stmt_can_throw_internal.
+ * tree-cfg.c (is_ctrl_altering_stmt): Pass cfun to
+ stmt_can_throw_internal.
+ (verify_gimple_in_cfg): Pass cfun to stmt_could_throw_p.
+ (stmt_can_terminate_bb_p): Pass cfun to stmt_can_throw_external.
+ (gimple_purge_dead_eh_edges): Pass cfun to stmt_can_throw_internal.
+ * tree-complex.c (expand_complex_libcall): Pass cfun to
+ stmt_could_throw_p and to stmt_can_throw_internal.
+ (expand_complex_multiplication): Pass cfun to stmt_can_throw_internal.
+ * tree-inline.c (copy_edges_for_bb): Likewise.
+ (maybe_move_debug_stmts_to_successors): Likewise.
+ * tree-outof-ssa.c (ssa_is_replaceable_p): Pass cfun to
+ stmt_could_throw_p.
+ * tree-parloops.c (oacc_entry_exit_ok_1): Likewise.
+ * tree-sra.c (scan_function): Pass cfun to stmt_can_throw_external.
+ * tree-ssa-alias.c (stmt_kills_ref_p): Pass cfun to
+ stmt_can_throw_internal.
+ * tree-ssa-ccp.c (optimize_atomic_bit_test_and): Likewise.
+ * tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Pass cfun to
+ stmt_could_throw_p.
+ (mark_aliased_reaching_defs_necessary_1): Pass cfun to
+ stmt_can_throw_internal.
+ * tree-ssa-forwprop.c (pass_forwprop::execute): Likewise.
+ * tree-ssa-loop-im.c (movement_possibility): Pass cfun to
+ stmt_could_throw_p.
+ * tree-ssa-loop-ivopts.c (find_givs_in_stmt_scev): Likewise.
+ (add_autoinc_candidates): Pass cfun to stmt_can_throw_internal.
+ * tree-ssa-math-opts.c (pass_cse_reciprocals::execute): Likewise.
+ (convert_mult_to_fma_1): Likewise.
+ (convert_to_divmod): Likewise.
+ * tree-ssa-phiprop.c (propagate_with_phi): Likewise.
+ * tree-ssa-pre.c (compute_avail): Pass cfun to stmt_could_throw_p.
+ * tree-ssa-propagate.c
+ (substitute_and_fold_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-reassoc.c (suitable_cond_bb): Likewise.
+ (maybe_optimize_range_tests): Likewise.
+ (linearize_expr_tree): Likewise.
+ (reassociate_bb): Likewise.
+ * tree-ssa-sccvn.c (copy_reference_ops_from_call): Likewise.
+ * tree-ssa-scopedtables.c (hashable_expr_equal_p): Likewise.
+ * tree-ssa-strlen.c (adjust_last_stmt): Likewise.
+ (handle_char_store): Likewise.
+ * tree-vect-data-refs.c (vect_find_stmt_data_reference): Pass cfun to
+ stmt_can_throw_internal.
+ * tree-vect-patterns.c (check_bool_pattern): Pass cfun to
+ stmt_could_throw_p.
+ * tree-vect-stmts.c (vect_finish_stmt_generation_1): Likewise.
+ (vectorizable_call): Pass cfun to stmt_can_throw_internal.
+ (vectorizable_simd_clone_call): Likewise.
+ * value-prof.c (gimple_ic): Pass cfun to stmt_could_throw_p.
+ (gimple_stringop_fixed_value): Likewise.
+
2018-10-22 Ilya Leoshkevich <iii@linux.ibm.com>
* config/s390/s390.c (s390_loadrelative_operand_p): Accept
e = make_direct (new_callee);
}
- push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
- e->can_throw_external = stmt_can_throw_external (new_stmt);
- pop_cfun ();
+ function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
+ e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
if (e->caller->call_site_hash)
cgraph_add_edge_to_call_site_hash (e);
}
edge->count = count;
edge->call_stmt = call_stmt;
- push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
edge->can_throw_external
- = call_stmt ? stmt_can_throw_external (call_stmt) : false;
- pop_cfun ();
+ = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
+ call_stmt) : false;
if (call_stmt
&& callee && callee->decl
&& !gimple_check_call_matching_types (call_stmt, callee->decl,
gimple_set_vuse (g, gimple_vuse (stmt));
SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
tree oldlhs = gimple_call_lhs (stmt);
- if (stmt_can_throw_internal (stmt))
+ if (stmt_can_throw_internal (cfun, stmt))
{
throws = true;
e = find_fallthru_edge (gsi_bb (*gsi)->succs);
/* Mark stmts whose output we fully propagate for removal. */
if ((val = value_range_constant_singleton (vr))
&& may_propagate_copy (output, val)
- && !stmt_could_throw_p (stmt)
+ && !stmt_could_throw_p (cfun, stmt)
&& !gimple_has_side_effects (stmt))
{
stmts_to_remove.safe_push (stmt);
}
if (gimple_vuse (stmt)
&& gimple_assign_load_p (stmt)
- && !stmt_can_throw_internal (stmt)
+ && !stmt_can_throw_internal (cfun, stmt)
&& !gimple_has_volatile_ops (stmt))
{
tree mem = gimple_assign_rhs1 (stmt);
}
if (gimple_assign_single_p (stmt) && gimple_vdef (stmt)
- && !stmt_can_throw_internal (stmt)
+ && !stmt_can_throw_internal (cfun, stmt)
&& lhs_valid_for_store_merging_p (gimple_assign_lhs (stmt)))
process_store (stmt);
else
{
gimple *gs = gsi_stmt (gsi);
- if (stmt_could_throw_p (gs))
+ if (stmt_could_throw_p (cfun, gs))
continue;
if (gimple_vuse (gs) && gimple_assign_single_p (gs))
|| !(i & (ECF_CONST | ECF_PURE)))
need_temp = true;
}
- if (stmt_can_throw_internal (stmt))
+ if (stmt_can_throw_internal (cfun, stmt))
need_temp = true;
}
}
{
int flags = gimple_call_flags (call);
tree callee_t = gimple_call_fndecl (call);
- bool possibly_throws = stmt_could_throw_p (call);
+ bool possibly_throws = stmt_could_throw_p (cfun, call);
bool possibly_throws_externally = (possibly_throws
- && stmt_can_throw_external (call));
+ && stmt_can_throw_external (cfun, call));
if (possibly_throws)
{
ipa ? check_ipa_store : check_store);
if (gimple_code (stmt) != GIMPLE_CALL
- && stmt_could_throw_p (stmt))
+ && stmt_could_throw_p (cfun, stmt))
{
if (cfun->can_throw_non_call_exceptions)
{
fprintf (dump_file, " can throw; looping\n");
local->looping = true;
}
- if (stmt_can_throw_external (stmt))
+ if (stmt_can_throw_external (cfun, stmt))
{
if (dump_file)
fprintf (dump_file, " can throw externally\n");
for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
!gsi_end_p (gsi);
gsi_next (&gsi))
- if (stmt_can_throw_external (gsi_stmt (gsi)))
+ if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
{
if (is_gimple_call (gsi_stmt (gsi)))
{
gassign *assign_stmt;
/* Remember if the call was going to throw. */
- if (stmt_can_throw_internal (stmt))
+ if (stmt_can_throw_internal (cfun, stmt))
{
edge_iterator ei;
edge e;
}
/* If a statement can throw, it alters control flow. */
- return stmt_can_throw_internal (t);
+ return stmt_can_throw_internal (cfun, t);
}
visited_throwing_stmts.add (stmt);
if (lp_nr > 0)
{
- if (!stmt_could_throw_p (stmt))
+ if (!stmt_could_throw_p (cfun, stmt))
{
if (verify_nothrow)
{
/* Eh exception not handled internally terminates execution of the whole
function. */
- if (stmt_can_throw_external (t))
+ if (stmt_can_throw_external (cfun, t))
return true;
/* NORETURN and LONGJMP calls already have an edge to exit.
edge_iterator ei;
gimple *stmt = last_stmt (bb);
- if (stmt && stmt_can_throw_internal (stmt))
+ if (stmt && stmt_can_throw_internal (cfun, stmt))
return false;
for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
if (inplace_p)
{
gimple *old_stmt = gsi_stmt (*gsi);
- gimple_call_set_nothrow (stmt, !stmt_could_throw_p (old_stmt));
+ gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
lhs = gimple_assign_lhs (old_stmt);
gimple_call_set_lhs (stmt, lhs);
gsi_replace (gsi, stmt, true);
type = TREE_TYPE (type);
- if (stmt_can_throw_internal (stmt))
+ if (stmt_can_throw_internal (cfun, stmt))
{
edge_iterator ei;
edge e;
/* If optimizing for size or not at all just do a libcall.
Same if there are exception-handling edges or signaling NaNs. */
if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
- || stmt_can_throw_internal (gsi_stmt (*gsi))
+ || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
|| flag_signaling_nans)
{
expand_complex_libcall (gsi, type, ar, ai, br, bi,
available on the EH edge. Only do so for statements that
potentially fall through (no noreturn calls e.g.), otherwise
this new assignment might create fake fallthru regions. */
- if (stmt_could_throw_p (stmt)
+ if (stmt_could_throw_p (cfun, stmt)
&& gimple_has_lhs (stmt)
&& gimple_stmt_may_fallthru (stmt)
&& !tree_could_throw_p (gimple_get_lhs (stmt))
gsi_insert_after (gsi, s, GSI_SAME_STMT);
}
/* Look for things that can throw exceptions, and record them. */
- if (state->cur_region && stmt_could_throw_p (stmt))
+ if (state->cur_region && stmt_could_throw_p (cfun, stmt))
{
record_stmt_eh_region (state->cur_region, stmt);
note_eh_region_may_contain_throw (state->cur_region);
}
-/* Return true if statement STMT could throw an exception. */
+/* Return true if statement STMT within FUN could throw an exception. */
bool
-stmt_could_throw_p (gimple *stmt)
+stmt_could_throw_p (function *fun, gimple *stmt)
{
if (!flag_exceptions)
return false;
case GIMPLE_COND:
{
- if (!cfun->can_throw_non_call_exceptions)
+ if (fun && !fun->can_throw_non_call_exceptions)
return false;
gcond *cond = as_a <gcond *> (stmt);
tree lhs = gimple_cond_lhs (cond);
}
case GIMPLE_ASSIGN:
- if (!cfun->can_throw_non_call_exceptions
+ if ((fun && !fun->can_throw_non_call_exceptions)
|| gimple_clobber_p (stmt))
return false;
return stmt_could_throw_1_p (as_a <gassign *> (stmt));
case GIMPLE_ASM:
- if (!cfun->can_throw_non_call_exceptions)
+ if (fun && !fun->can_throw_non_call_exceptions)
return false;
return gimple_asm_volatile_p (as_a <gasm *> (stmt));
return false;
}
-/* Return true if STMT can throw an exception that is not caught within
- the current function (CFUN). */
+/* Return true if STMT can throw an exception that is not caught within its
+ function FUN. FUN can be NULL but the function is extra conservative
+ then. */
bool
-stmt_can_throw_external (gimple *stmt)
+stmt_can_throw_external (function *fun, gimple *stmt)
{
int lp_nr;
- if (!stmt_could_throw_p (stmt))
+ if (!stmt_could_throw_p (fun, stmt))
return false;
+ if (!fun)
+ return true;
- lp_nr = lookup_stmt_eh_lp (stmt);
+ lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
return lp_nr == 0;
}
-/* Return true if STMT can throw an exception that is caught within
- the current function (CFUN). */
+/* Return true if STMT can throw an exception that is caught within its
+ function FUN. */
bool
-stmt_can_throw_internal (gimple *stmt)
+stmt_can_throw_internal (function *fun, gimple *stmt)
{
int lp_nr;
- if (!stmt_could_throw_p (stmt))
+ gcc_checking_assert (fun);
+ if (!stmt_could_throw_p (fun, stmt))
return false;
- lp_nr = lookup_stmt_eh_lp (stmt);
+ lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
return lp_nr > 0;
}
bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
{
- if (stmt_could_throw_p (stmt))
+ if (stmt_could_throw_p (ifun, stmt))
return false;
return remove_stmt_from_eh_lp_fn (ifun, stmt);
}
if (lp_nr != 0)
{
- bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
+ bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);
if (new_stmt == old_stmt && new_stmt_could_throw)
return false;
{
int old_lp_nr, new_lp_nr;
- if (!stmt_could_throw_p (new_stmt))
+ if (!stmt_could_throw_p (new_fun, new_stmt))
return false;
old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
{
int lp_nr;
- if (!stmt_could_throw_p (new_stmt))
+ if (!stmt_could_throw_p (cfun, new_stmt))
return false;
lp_nr = lookup_stmt_eh_lp (old_stmt);
}
else if (gimple_code (last) == GIMPLE_RESX)
{
- if (stmt_can_throw_external (last))
+ if (stmt_can_throw_external (cfun, last))
optimize_clobbers (bb);
else
flags |= sink_clobbers (bb);
resx = gsi_stmt (gsi);
if (resx && is_gimple_resx (resx))
{
- if (stmt_can_throw_external (resx))
+ if (stmt_can_throw_external (cfun, resx))
optimize_clobbers (bb);
else if (sink_clobbers (bb))
ret = true;
return false;
}
- if (!stmt_could_throw_p (stmt))
+ if (!stmt_could_throw_p (cfun, stmt))
{
error ("BB %i last statement has incorrectly set lp", bb->index);
return true;
extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
extern bool tree_could_trap_p (tree);
extern tree rewrite_to_non_trapping_overflow (tree);
-extern bool stmt_could_throw_p (gimple *);
+extern bool stmt_could_throw_p (function *, gimple *);
extern bool tree_could_throw_p (tree);
-extern bool stmt_can_throw_external (gimple *);
-extern bool stmt_can_throw_internal (gimple *);
+extern bool stmt_can_throw_external (function *, gimple *);
+extern bool stmt_can_throw_internal (function *, gimple *);
extern bool maybe_clean_eh_stmt_fn (struct function *, gimple *);
extern bool maybe_clean_eh_stmt (gimple *);
extern bool maybe_clean_or_replace_eh_stmt (gimple *, gimple *);
propagation can change an INDIRECT_REF which throws
into a COMPONENT_REF which doesn't. If the copy
can throw, the original could also throw. */
- can_throw = stmt_can_throw_internal (copy_stmt);
+ can_throw = stmt_can_throw_internal (cfun, copy_stmt);
nonlocal_goto
= (stmt_can_make_abnormal_goto (copy_stmt)
&& !computed_goto_p (copy_stmt));
if (gsi_end_p (si)
|| gsi_one_before_end_p (si)
- || !(stmt_can_throw_internal (gsi_stmt (si))
+ || !(stmt_can_throw_internal (cfun, gsi_stmt (si))
|| stmt_can_make_abnormal_goto (gsi_stmt (si))))
return;
return false;
/* If the statement may throw an exception, it cannot be replaced. */
- if (stmt_could_throw_p (stmt))
+ if (stmt_could_throw_p (cfun, stmt))
return false;
/* Punt if there is more than 1 def. */
continue;
else if (!gimple_has_side_effects (stmt)
&& !gimple_could_trap_p (stmt)
- && !stmt_could_throw_p (stmt)
+ && !stmt_could_throw_p (cfun, stmt)
&& !gimple_vdef (stmt)
&& !gimple_vuse (stmt))
continue;
tree t;
unsigned i;
- if (final_bbs && stmt_can_throw_external (stmt))
+ if (final_bbs && stmt_can_throw_external (cfun, stmt))
bitmap_set_bit (final_bbs, bb->index);
switch (gimple_code (stmt))
{
??? We only need to care about the RHS throwing. For aggregate
assignments or similar calls and non-call exceptions the LHS
might throw as well. */
- && !stmt_can_throw_internal (stmt))
+ && !stmt_can_throw_internal (cfun, stmt))
{
tree lhs = gimple_get_lhs (stmt);
/* If LHS is literally a base of the access we are done. */
gimple_set_location (g, gimple_location (call));
gimple_set_vuse (g, gimple_vuse (call));
gimple_set_vdef (g, gimple_vdef (call));
- bool throws = stmt_can_throw_internal (call);
+ bool throws = stmt_can_throw_internal (cfun, call);
gimple_call_set_nothrow (as_a <gcall *> (g),
gimple_call_nothrow_p (as_a <gcall *> (call)));
SSA_NAME_DEF_STMT (gimple_vdef (call)) = g;
throw. If a statement could throw, it can be deemed necessary. */
if (cfun->can_throw_non_call_exceptions
&& !cfun->can_delete_dead_exceptions
- && stmt_could_throw_p (stmt))
+ && stmt_could_throw_p (cfun, stmt))
{
mark_stmt_necessary (stmt, true);
return;
??? We only need to care about the RHS throwing. For aggregate
assignments or similar calls and non-call exceptions the LHS
might throw as well. */
- && !stmt_can_throw_internal (def_stmt))
+ && !stmt_can_throw_internal (cfun, def_stmt))
{
tree base, lhs = gimple_get_lhs (def_stmt);
poly_int64 size, offset, max_size;
&& !gimple_has_volatile_ops (stmt)
&& (TREE_CODE (gimple_assign_rhs1 (stmt))
!= TARGET_MEM_REF)
- && !stmt_can_throw_internal (stmt))
+ && !stmt_can_throw_internal (cfun, stmt))
{
/* Rewrite loads used only in real/imagpart extractions to
component-wise loads. */
if (stmt_ends_bb_p (stmt)
|| gimple_has_volatile_ops (stmt)
|| gimple_has_side_effects (stmt)
- || stmt_could_throw_p (stmt))
+ || stmt_could_throw_p (cfun, stmt))
return MOVE_IMPOSSIBLE;
if (is_gimple_call (stmt))
/* If STMT could throw, then do not consider STMT as defining a GIV.
While this will suppress optimizations, we can not safely delete this
GIV and associated statements, even if it appears it is not used. */
- if (stmt_could_throw_p (stmt))
+ if (stmt_could_throw_p (cfun, stmt))
return false;
return true;
statement. */
if (use_bb->loop_father != data->current_loop
|| !dominated_by_p (CDI_DOMINATORS, data->current_loop->latch, use_bb)
- || stmt_can_throw_internal (use->stmt)
+ || stmt_can_throw_internal (cfun, use->stmt)
|| !cst_and_fits_in_hwi (step))
return;
stmt = gsi_stmt (gsi);
if (flag_unsafe_math_optimizations
&& is_gimple_assign (stmt)
- && !stmt_can_throw_internal (stmt)
+ && !stmt_can_throw_internal (cfun, stmt)
&& gimple_assign_rhs_code (stmt) == RDIV_EXPR)
optimize_recip_sqrt (&gsi, def);
}
else
fma_stmt = gimple_build_call_internal (IFN_FMA, 3, mulop1, op2, addop);
gimple_set_lhs (fma_stmt, gimple_get_lhs (use_stmt));
- gimple_call_set_nothrow (fma_stmt, !stmt_can_throw_internal (use_stmt));
+ gimple_call_set_nothrow (fma_stmt, !stmt_can_throw_internal (cfun,
+ use_stmt));
gsi_replace (&gsi, fma_stmt, true);
/* Follow all SSA edges so that we generate FMS, FNMA and FNMS
regardless of where the negation occurs. */
static bool
convert_to_divmod (gassign *stmt)
{
- if (stmt_can_throw_internal (stmt)
+ if (stmt_can_throw_internal (cfun, stmt)
|| !divmod_candidate_p (stmt))
return false;
&& operand_equal_p (op1, gimple_assign_rhs1 (use_stmt), 0)
&& operand_equal_p (op2, gimple_assign_rhs2 (use_stmt), 0))
{
- if (stmt_can_throw_internal (use_stmt))
+ if (stmt_can_throw_internal (cfun, use_stmt))
continue;
basic_block bb = gimple_bb (use_stmt);
&& operand_equal_p (top_op2, gimple_assign_rhs2 (use_stmt), 0))
{
if (use_stmt == top_stmt
- || stmt_can_throw_internal (use_stmt)
+ || stmt_can_throw_internal (cfun, use_stmt)
|| !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), top_bb))
continue;
|| types_compatible_p
(TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
/* We cannot replace a load that may throw or is volatile. */
- && !stmt_can_throw_internal (use_stmt)))
+ && !stmt_can_throw_internal (cfun, use_stmt)))
continue;
/* Check if we can move the loads. The def stmt of the virtual use
BB_LIVE_VOP_ON_EXIT (block) = gimple_vdef (stmt);
if (gimple_has_side_effects (stmt)
- || stmt_could_throw_p (stmt)
+ || stmt_could_throw_p (cfun, stmt)
|| is_gimple_debug (stmt))
continue;
if (sprime
&& sprime != lhs
&& may_propagate_copy (lhs, sprime)
- && !stmt_could_throw_p (stmt)
+ && !stmt_could_throw_p (cfun, stmt)
&& !gimple_has_side_effects (stmt)
/* We have to leave ASSERT_EXPRs around for jump-threading. */
&& (!is_gimple_assign (stmt)
|| (gimple_code (stmt) != GIMPLE_COND
&& (backward || !final_range_test_p (stmt)))
|| gimple_visited_p (stmt)
- || stmt_could_throw_p (stmt)
+ || stmt_could_throw_p (cfun, stmt)
|| *other_bb == bb)
return false;
is_cond = gimple_code (stmt) == GIMPLE_COND;
else
return cfg_cleanup_needed;
- if (stmt_could_throw_p (stmt))
+ if (stmt_could_throw_p (cfun, stmt))
return cfg_cleanup_needed;
/* As relative ordering of post-dominator sons isn't fixed,
{
binlhsdef = SSA_NAME_DEF_STMT (binlhs);
binlhsisreassoc = (is_reassociable_op (binlhsdef, rhscode, loop)
- && !stmt_could_throw_p (binlhsdef));
+ && !stmt_could_throw_p (cfun, binlhsdef));
}
if (TREE_CODE (binrhs) == SSA_NAME)
{
binrhsdef = SSA_NAME_DEF_STMT (binrhs);
binrhsisreassoc = (is_reassociable_op (binrhsdef, rhscode, loop)
- && !stmt_could_throw_p (binrhsdef));
+ && !stmt_could_throw_p (cfun, binrhsdef));
}
/* If the LHS is not reassociable, but the RHS is, we need to swap
stmt = gsi_stmt (gsi);
if (is_gimple_assign (stmt)
- && !stmt_could_throw_p (stmt))
+ && !stmt_could_throw_p (cfun, stmt))
{
tree lhs, rhs1, rhs2;
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
temp.opcode = CALL_EXPR;
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
- if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
+ if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
temp.op2 = size_int (lr);
temp.off = -1;
result->safe_push (temp);
expr1->ops.call.args[i], 0))
return false;
- if (stmt_could_throw_p (expr0->ops.call.fn_from))
+ if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
{
int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
if (!integer_zerop (gimple_assign_rhs1 (last.stmt)))
return;
- if (stmt_could_throw_p (last.stmt))
+ if (stmt_could_throw_p (cfun, last.stmt))
return;
gsi = gsi_for_stmt (last.stmt);
unlink_stmt_vdef (last.stmt);
{
/* When overwriting a '\0' with a '\0', the store can be removed
if we know it has been stored in the current function. */
- if (!stmt_could_throw_p (stmt) && si->writable)
+ if (!stmt_could_throw_p (cfun, stmt) && si->writable)
{
unlink_stmt_vdef (stmt);
release_defs (stmt);
return opt_result::failure_at (stmt, "not vectorized: volatile type: %G",
stmt);
- if (stmt_can_throw_internal (stmt))
+ if (stmt_can_throw_internal (cfun, stmt))
return opt_result::failure_at (stmt,
"not vectorized:"
" statement can throw an exception: %G",
/* If the comparison can throw, then is_gimple_condexpr will be
false and we can't make a COND_EXPR/VEC_COND_EXPR out of it. */
- if (stmt_could_throw_p (def_stmt))
+ if (stmt_could_throw_p (cfun, def_stmt))
return false;
comp_vectype = get_vectype_for_scalar_type (TREE_TYPE (rhs1));
e.g. be in a must-not-throw region. Ensure newly created stmts
that could throw are part of the same region. */
int lp_nr = lookup_stmt_eh_lp (stmt_info->stmt);
- if (lp_nr != 0 && stmt_could_throw_p (vec_stmt))
+ if (lp_nr != 0 && stmt_could_throw_p (cfun, vec_stmt))
add_stmt_to_eh_lp (vec_stmt, lp_nr);
return vec_stmt_info;
|| TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME)
return false;
- gcc_checking_assert (!stmt_can_throw_internal (stmt));
+ gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt));
vectype_out = STMT_VINFO_VECTYPE (stmt_info);
&& TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME)
return false;
- gcc_checking_assert (!stmt_can_throw_internal (stmt));
+ gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt));
vectype = STMT_VINFO_VECTYPE (stmt_info);
/* Build an EH edge for the direct call if necessary. */
lp_nr = lookup_stmt_eh_lp (icall_stmt);
- if (lp_nr > 0 && stmt_could_throw_p (dcall_stmt))
+ if (lp_nr > 0 && stmt_could_throw_p (cfun, dcall_stmt))
{
add_stmt_to_eh_lp (dcall_stmt, lp_nr);
}
PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
}
}
- if (!stmt_could_throw_p (dcall_stmt))
+ if (!stmt_could_throw_p (cfun, dcall_stmt))
gimple_purge_dead_eh_edges (dcall_bb);
return dcall_stmt;
}
}
/* Because these are all string op builtins, they're all nothrow. */
- gcc_assert (!stmt_could_throw_p (vcall_stmt));
- gcc_assert (!stmt_could_throw_p (icall_stmt));
+ gcc_assert (!stmt_could_throw_p (cfun, vcall_stmt));
+ gcc_assert (!stmt_could_throw_p (cfun, icall_stmt));
}
/* Find values inside STMT for that we want to measure histograms for