+2013-04-23 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/57036
+ * tree-inline.c (copy_edges_for_bb): Add can_make_abnormal_goto
+ parameter, only add abnormal goto edges from the copied body
+ if the call could perform abnormal gotos.
+ (copy_cfg_body): Adjust.
+
2013-04-23 Sofiane Naci <sofiane.naci@arm.com>
* config/aarch64/aarch64.md (*mov<mode>_aarch64): Add simd attribute.
+2013-04-23 Richard Biener <rguenther@suse.de>
+
+ PR middle-end/57036
+ * gcc.dg/torture/pr57036-1.c: New testcase.
+ * gcc.dg/torture/pr57036-2.c: Likewise.
+
2013-04-23 Sofiane Naci <sofiane.naci@arm.com>
* gcc.target/aarch64/scalar-mov.c: New testcase.
debug stmts are left after a statement that must end the basic block. */
static bool
-copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
+copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
+ bool can_make_abnormal_goto)
{
basic_block new_bb = (basic_block) bb->aux;
edge_iterator ei;
into a COMPONENT_REF which doesn't. If the copy
can throw, the original could also throw. */
can_throw = stmt_can_throw_internal (copy_stmt);
- nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
+ /* If the call we inline cannot make an abnormal goto, do not add
+ additional abnormal edges but only retain those already present
+ in the original function body. */
+ nonlocal_goto
+ = can_make_abnormal_goto && stmt_can_make_abnormal_goto (copy_stmt);
if (can_throw || nonlocal_goto)
{
last = last_basic_block;
/* Now that we've duplicated the blocks, duplicate their edges. */
+ bool can_make_abnormal_goto
+ = id->gimple_call && stmt_can_make_abnormal_goto (id->gimple_call);
FOR_ALL_BB_FN (bb, cfun_to_copy)
if (!blocks_to_copy
|| (bb->index > 0 && bitmap_bit_p (blocks_to_copy, bb->index)))
- need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map);
+ need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
+ can_make_abnormal_goto);
if (new_entry)
{