/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "calls.h"
#include "except.h"
#include "cfganal.h"
#include "cfgcleanup.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-cfg.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "gimple-low.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gimplify.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple *g;} treemple;
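
/* For example, the code below stores "temp.t = label" when recording a
   LABEL_DECL and "temp.g = stmt" when recording a GIMPLE_TRY; both views
   occupy the same slot type in the hash tables.  */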
/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
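
/* For example, a call recorded against landing pad 2 maps to +2 in the
   table, a statement inside a MUST_NOT_THROW region of index 3 maps to
   -3, and a statement that is absent from the table reads back as 0
   (see lookup_stmt_eh_lp_fn below).  */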
/* Add statement T in function IFUN to landing pad NUM.  */

static void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple *t, int num)
{
  gcc_assert (num != 0);

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, hash_map<gimple *, int>::create_ggc (31));

  gcc_assert (!get_eh_throw_stmt_table (ifun)->put (t, num));
}
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple *t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple *t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple *t)
{
  if (!get_eh_throw_stmt_table (ifun))
    return false;

  if (!get_eh_throw_stmt_table (ifun)->get (t))
    return false;

  get_eh_throw_stmt_table (ifun)->remove (t);
  return true;
}
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple *t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
{
  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
  return lp_nr ? *lp_nr : 0;
}
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple *t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gtry *parent;
};
/* Hashtable helpers.  */

struct finally_tree_hasher : free_ptr_hash <finally_tree_node>
{
  static inline hashval_t hash (const finally_tree_node *);
  static inline bool equal (const finally_tree_node *,
                            const finally_tree_node *);
};
inline hashval_t
finally_tree_hasher::hash (const finally_tree_node *v)
{
  return (intptr_t)v->child.t >> 4;
}
inline bool
finally_tree_hasher::equal (const finally_tree_node *v,
                            const finally_tree_node *c)
{
  return v->child.t == c->child.t;
}
/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table<finally_tree_hasher> *finally_tree;
static void
record_in_finally_tree (treemple child, gtry *parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree->find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple *stmt, gtry *region);
/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple *stmt, gtry *region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (as_a <glabel *> (stmt));
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt),
                                  as_a <gtry *> (stmt));
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (
                                as_a <gcatch *> (stmt)),
                              region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
        collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple *target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree->find (&n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
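
/* For example, given roughly

	try { try { goto L; } finally { a (); } } finally { b (); }
	L: ;

   starting from L and walking parent links, the search runs off the
   root without meeting either GIMPLE_TRY_FINALLY node, so the goto
   leaves both finally trees.  */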
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straight-forward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple *cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
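
/* For instance, in "try { goto out; } finally { f (); } out: ;" the
   goto leaves the try block, so a goto_queue_node is created with STMT
   pointing at the goto, INDEX the slot of "out" in dest_array, and
   REPL_STMT later filled in with a branch to the lowered finally body;
   CONT_STMT holds the original "goto out;" so it can be re-emitted
   after the finally body runs.  */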
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gtry *try_finally_expr;
  gtry *top_p;

  /* While lowering, top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  hash_map<gimple *, goto_queue_node *> *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = new hash_map<gimple *, goto_queue_node *>;
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          bool existed = tf->goto_queue_map->put (tf->goto_queue[i].stmt.g,
                                                  &tf->goto_queue[i]);
          gcc_assert (!existed);
        }
    }

  goto_queue_node **slot = tf->goto_queue_map->get (stmt.g);
  if (slot != NULL)
    return ((*slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */
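
/* For instance, given "if (a) goto L1; else goto L2;" where L1's
   replacement is the single statement "goto L3;", we simply rewrite the
   label operand to L3.  If the replacement is a longer sequence, we
   instead branch to a fresh label placed after the GIMPLE_COND and emit
   a copy of that sequence there.  */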
static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  /* Insert the new label, with the original labels if needed.  */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple *stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gimple_stmt_iterator i;
          seq = gimple_seq_copy (seq);
          for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
            gimple_set_location (gsi_stmt (i), gimple_location (stmt));
          gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
                                      as_a <gcatch *> (stmt)),
                                    tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
                                      tf);
        replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
                                      tf);
      }
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label,
                      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
                            location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
        if (tf->dest_array[index] == label)
          break;
      if (index == n)
        tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_true_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
        new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
        record_in_goto_queue_label (tf, new_stmt,
                                    gimple_cond_false_label (cond_stmt),
                                    EXPR_LOCATION (*new_stmt.tp));
      }
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
                                  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

#if CHECKING_P
static void
verify_norecord_switch_expr (struct leh_state *state,
                             gswitch *switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple *x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  ggoto *x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  glabel *x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gresx *x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
  gimple *x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
                             location_t loc)
{
  gtry *region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      /* We duplicate __builtin_stack_restore at -O0 in the hope of eliminating
         it on the EH paths.  When it is not eliminated, make it transparent in
         the debug info.  */
      if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
        gimple_set_location (stmt, UNKNOWN_LOCATION);
      else if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, loc);
          gimple_set_block (stmt, block);
        }
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static inline geh_else *
get_eh_else (gimple_seq finally)
{
  gimple *x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return as_a <geh_else *> (x);
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  gimple_seq finally = gimple_try_cleanup (tf->top_p);

  /* EH_ELSE doesn't come from user code; only compiler generated stuff.
     It does need to be handled here, so as to separate the (different)
     EH path from the normal path.  But we should not attempt to wrap
     it with a must-not-throw node (which indeed gets in the way).  */
  if (geh_else *eh_else = get_eh_else (finally))
    {
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
      finally = gimple_eh_else_e_body (eh_else);

      /* Let the ELSE see the exception that's being processed, but
         since the cleanup is outside the try block, process it with
         outer_state, otherwise it may be used as a cleanup for
         itself, and Bad Things (TM) ensue.  */
      eh_region save_ehp = outer_state->ehp_region;
      outer_state->ehp_region = this_state->cur_region;
      lower_eh_constructs_1 (outer_state, &finally);
      outer_state->ehp_region = save_ehp;
    }
  else
    {
      /* First check for nothing to do.  */
      if (lang_hooks.eh_protect_cleanup_actions == NULL)
        return;
      tree actions = lang_hooks.eh_protect_cleanup_actions ();
      if (actions == NULL)
        return;

      if (this_state)
        finally = lower_try_finally_dup_block (finally, outer_state,
                                               gimple_location (tf->try_finally_expr));

      /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
         set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
         to be in an enclosing scope, but needs to be implemented at this level
         to avoid a nesting violation (see wrap_temporary_cleanups in
         cp/decl.c).  Since it's logically at an outer level, we should call
         terminate before we get to it, so strip it away before adding the
         MUST_NOT_THROW filter.  */
      gimple_stmt_iterator gsi = gsi_start (finally);
      gimple *x = gsi_stmt (gsi);
      if (gimple_code (x) == GIMPLE_TRY
          && gimple_try_kind (x) == GIMPLE_TRY_CATCH
          && gimple_try_catch_is_cleanup (x))
        {
          gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
          gsi_remove (&gsi, false);
        }

      /* Wrap the block with protect_cleanup_actions as the action.  */
      geh_mnt *eh_mnt = gimple_build_eh_must_not_throw (actions);
      gtry *try_stmt = gimple_build_try (finally,
                                         gimple_seq_alloc_with_stmt (eh_mnt),
                                         GIMPLE_TRY_CATCH);
      finally = lower_eh_must_not_throw (outer_state, try_stmt);
    }

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (gimple_seq_may_fallthru (finally))
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
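
/* For instance, a finally block that ends in a noreturn call or a
   rethrow cannot fall through; every escaping goto/return is then
   simply redirected to one label at the head of the lowered finally
   body, and no continuation code after it needs to be emitted.  */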
static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple *x;
  geh_else *eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
        }
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
        {
          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_set_location (x, gimple_location (tf->try_finally_expr));
          gimple_seq_add_stmt (&eh_seq, x);
        }
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
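
/* For instance, "try { return x; } finally { f (); }" with no other
   exits emits the finally body exactly once, immediately followed by
   the original return; no dispatching machinery is required.  */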
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  geh_else *eh_else;
  glabel *label_stmt;
  gimple *x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      if (tf->may_throw)
        finally = gimple_eh_else_e_body (eh_else);
      else
        finally = gimple_eh_else_n_body (eh_else);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
        {
          tree block = gimple_block (stmt);
          gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
          gimple_set_block (stmt, block);
        }
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  label_stmt = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
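
/* Sketch of the transformation: "try { if (p) goto A; else goto B; }
   finally { f (); }" becomes roughly

	if (p) goto LA; else goto LB;
	LA: f (); goto A;
	LB: f (); goto B;

   i.e. one copy of the finally body per destination, each followed by
   the original continuation statement.  */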
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
      if (eh_else)
        seq = gimple_eh_else_e_body (eh_else);
      else
        seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state, q->location);
          lower_eh_constructs_1 (state, &seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
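
/* Sketch of the transformation: "try { if (p) goto A; else return; }
   finally { f (); }" becomes roughly

	if (p) { finally_tmp = 0; goto finally_label; }
	else { finally_tmp = 1; goto finally_label; }
	finally_label:
	f ();
	switch (finally_tmp)
	  {
	  case 0: goto A;
	  case 1: goto return_label;
	  }

   so the finally body is emitted once, at the cost of a temporary and
   the dispatch switch.  */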
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  auto_vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple *x;
  geh_else *eh_else;
  tree tmp;
  gimple *switch_stmt;
  gimple_seq finally;
  hash_map<tree, gimple *> *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (finally_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, finally_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
        {
          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, &finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);
        }

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
        {
          tree case_lab;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = new hash_map<tree, gimple *>;
          cont_map->put (case_lab, q->cont_stmt);
          case_label_vec.quick_push (case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple *cont_stmt;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      cont_stmt = *cont_map->get (last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    delete cont_map;

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  tree tem = case_label_vec.pop ();
  gcc_assert (tem == last_case);
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
                                     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   The main consideration is to prevent egregious code growth.  One way
   to do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
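
/* A worked instance of the heuristic below (illustrative numbers): a
   finally block estimated at 9 insns with 3 destinations gives
   f_estimate = (9 + 1) * 3 = 30 against sw_estimate = 10 + 2 * 3 = 16,
   so when optimizing for size we'd use the switch (30 < 16 fails),
   while at -O2 the copy is still taken since 30 < 100.  */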
static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  geh_else *eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
        return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          /* Duplicate __builtin_stack_restore in the hope of eliminating it
             on the EH paths and, consequently, useless cleanups.  */
          gimple *stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt)
              && !gimple_clobber_p (stmt)
              && !gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
            return false;
        }
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gtry *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple *x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    delete this_tf.goto_queue_map;

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gtry *tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple *x;
  geh_dispatch *eh_dispatch;
  location_t try_catch_loc = gimple_location (tp);
  location_t catch_loc = UNKNOWN_LOCATION;

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  eh_dispatch = gimple_build_eh_dispatch (try_region->index);
  gimple_seq_add_stmt (&new_seq, eh_dispatch);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
     itself, so that e.g. for coverage purposes the nested cleanups don't
     appear before the cleanup body.  See PR64634 for details.  */
  gimple_seq old_eh_seq = eh_seq;
  eh_seq = NULL;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gcatch *catch_stmt;
      gimple_seq handler;

      catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
      if (catch_loc == UNKNOWN_LOCATION)
        catch_loc = gimple_location (catch_stmt);
      c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));

      handler = gimple_catch_handler (catch_stmt);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  /* Try to set a location on the dispatching construct to avoid inheriting
     the location of the previous statement.  */
  gimple_set_location (eh_dispatch, catch_loc);

  gimple_try_set_cleanup (tp, new_seq);

  gimple_seq new_eh_seq = eh_seq;
  eh_seq = old_eh_seq;
  gimple_seq ret_seq = frob_into_branch_around (tp, try_region, out_label);
  gimple_seq_add_seq (&eh_seq, new_eh_seq);
  return ret_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple *inner, *x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  new_seq = NULL;
  x = gimple_build_eh_dispatch (this_region->index);
  gimple_set_location (x, gimple_location (tp));
  gimple_seq_add_stmt (&new_seq, x);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple *inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (
            as_a <geh_mnt *> (inner));
      this_region->u.must_not_throw.failure_loc
        = LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gtry *tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple *x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple *x;
  gimple *stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
        tree fndecl = gimple_call_fndecl (stmt);
        tree rhs, lhs;

        if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)
                {
                  tree nr = build_int_cst (integer_type_node,
                                           state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);
                }
              else
                {
                  /* The user has done something silly.  Remove it.  */
                  rhs = null_pointer_node;
                  goto do_replace;
                }
              break;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
            do_replace:
              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);
              /* fall through */

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
              return;

            default:
              break;
            }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw, use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall through (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
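      /* For example, a potentially-throwing "x = foo ();" becomes
         "tmp = foo (); x = tmp;", so that x keeps its old value along
         the EH edge out of the call.  */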
      if (stmt_could_throw_p (cfun, stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
        {
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs));
          gimple *s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);
        }
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (cfun, stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
      break;

    case GIMPLE_TRY:
      {
        gtry *try_stmt = as_a <gtry *> (stmt);
        if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
          replace = lower_try_finally (state, try_stmt);
        else
          {
            x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
            if (!x)
              {
                replace = gimple_try_eval (try_stmt);
                lower_eh_constructs_1 (state, &replace);
              }
            else
              switch (gimple_code (x))
                {
                case GIMPLE_CATCH:
                  replace = lower_catch (state, try_stmt);
                  break;
                case GIMPLE_EH_FILTER:
                  replace = lower_eh_filter (state, try_stmt);
                  break;
                case GIMPLE_EH_MUST_NOT_THROW:
                  replace = lower_eh_must_not_throw (state, try_stmt);
                  break;
                case GIMPLE_EH_ELSE:
                  /* This code is only valid with GIMPLE_TRY_FINALLY.  */
                  gcc_unreachable ();
                default:
                  replace = lower_cleanup (state, try_stmt);
                  break;
                }
          }
      }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
namespace {

const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_lower_eh
unsigned int
pass_lower_eh::execute (function *fun)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = new hash_table<finally_tree_hasher> (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  delete finally_tree;
  finally_tree = NULL;
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (fun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_eh (gcc::context *ctxt)
{
  return new pass_lower_eh (ctxt);
}
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (cfun, c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (cfun, r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple *stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (cfun, lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
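/* Editor's sketch (not GCC source): how a CFG-building loop might drive the
   two edge creators above; the surrounding loop is hypothetical.  */
#if 0
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *last = last_stmt (bb);
      if (last == NULL)
	continue;
      if (gimple_code (last) == GIMPLE_EH_DISPATCH)
	make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      else
	make_eh_edges (last);	/* No-op unless LAST has a landing pad.  */
    }
#endif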
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple *throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  old_bb = label_to_block (cfun, c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (cfun, r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

static bool
operation_could_trap_helper_p (enum tree_code op,
			       bool fp_operation,
			       bool honor_trapv,
			       bool honor_nans,
			       bool honor_snans,
			       tree divisor,
			       bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case ABSU_EXPR:
      /* ABSU_EXPR never traps.  */
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case COMPLEX_EXPR:
    case CONSTRUCTOR:
      /* Constructing an object cannot trap.  */
      return false;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Whether *COND_EXPR can trap depends on whether the
	 first argument can trap, so signal it as not handled.
	 Whether lhs is floating or not doesn't matter.  */
      *handled = false;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;

      *handled = false;
      return false;
    }
}
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
			tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
		     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  /* This function cannot tell whether or not COND_EXPR and VEC_COND_EXPR could
     trap, because that depends on the respective condition op.  */
  gcc_assert (op != COND_EXPR && op != VEC_COND_EXPR);

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
					honor_nans, honor_snans, divisor,
					&handled);
}
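/* Editor's sketch (not GCC source): querying the predicate above.  An
   integer division by a nonzero constant cannot trap; division by a
   variable or by constant zero can.  */
#if 0
  tree four = build_int_cst (integer_type_node, 4);
  bool t1 = operation_could_trap_p (TRUNC_DIV_EXPR, false, false, four);
  /* t1 == false: constant nonzero divisor.  */
  bool t2 = operation_could_trap_p (TRUNC_DIV_EXPR, false, false,
				    integer_zero_node);
  /* t2 == true: divisor is constant zero.  */
#endif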
/* Returns true if it is possible to prove that the index of
   an array access REF (an ARRAY_REF expression) falls into the
   array bounds.  */

static bool
in_array_bounds_p (tree ref)
{
  tree idx = TREE_OPERAND (ref, 1);
  tree min, max;

  if (TREE_CODE (idx) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (idx, min)
      || tree_int_cst_lt (max, idx))
    return false;

  return true;
}
/* Returns true if it is possible to prove that the range of
   an array access REF (an ARRAY_RANGE_REF expression) falls
   into the array bounds.  */

static bool
range_in_array_bounds_p (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
  tree range_min, range_max, min, max;

  range_min = TYPE_MIN_VALUE (domain_type);
  range_max = TYPE_MAX_VALUE (domain_type);
  if (!range_min
      || !range_max
      || TREE_CODE (range_min) != INTEGER_CST
      || TREE_CODE (range_max) != INTEGER_CST)
    return false;

  min = array_ref_low_bound (ref);
  max = array_ref_up_bound (ref);
  if (!min
      || !max
      || TREE_CODE (min) != INTEGER_CST
      || TREE_CODE (max) != INTEGER_CST)
    return false;

  if (tree_int_cst_lt (range_min, min)
      || tree_int_cst_lt (max, range_max))
    return false;

  return true;
}
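/* Editor's note (illustration, not GCC source): given "int a[10];", the
   access a[5] has a constant index and constant bounds, so the predicates
   above can prove it in bounds.  An access a[i_3] with a variable index,
   or an array whose domain lacks INTEGER_CST bounds (e.g. a trailing
   flexible array member), fails the checks above and is conservatively
   treated as potentially trapping by tree_could_trap_p below.  */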
/* Return true if EXPR can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, div = NULL_TREE;

  if (!expr)
    return false;

  /* In COND_EXPR and VEC_COND_EXPR only the condition may trap, but
     they won't appear as operands in GIMPLE form, so this is just for the
     GENERIC uses where it needs to recurse on the operands and so
     *COND_EXPR itself doesn't trap.  */
  if (TREE_CODE (expr) == COND_EXPR || TREE_CODE (expr) == VEC_COND_EXPR)
    return false;

  code = TREE_CODE (expr);
  t = TREE_TYPE (expr);

  if (t)
    {
      if (COMPARISON_CLASS_P (expr))
	fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
      else
	fp_operation = FLOAT_TYPE_P (t);
      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
    }

  if (TREE_CODE_CLASS (code) == tcc_binary)
    div = TREE_OPERAND (expr, 1);
  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
    return true;

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      return !in_array_bounds_p (expr);

    case TARGET_MEM_REF:
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	  && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
	return true;
      if (TREE_THIS_NOTRAP (expr))
	return false;
      /* We cannot prove that the access is in-bounds when we have
	 variable-index TARGET_MEM_REFs.  */
      if (code == TARGET_MEM_REF
	  && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
	return true;
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
	  poly_offset_int off = mem_ref_offset (expr);
	  if (maybe_lt (off, 0))
	    return true;
	  if (TREE_CODE (base) == STRING_CST)
	    return maybe_le (TREE_STRING_LENGTH (base), off);
	  tree size = DECL_SIZE_UNIT (base);
	  if (size == NULL_TREE
	      || !poly_int_tree_p (size)
	      || maybe_le (wi::to_poly_offset (size), off))
	    return true;
	  /* Now we are sure the first byte of the access is inside
	     the object.  */
	  return false;
	}
      return true;

    case INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t))
	return true;
      if (DECL_WEAK (t))
	return tree_could_trap_p (t);
      return false;

    case FUNCTION_DECL:
      /* Assume that accesses to weak functions may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  cgraph_node *node = cgraph_node::get (expr);
	  if (node)
	    node = node->function_symbol ();
	  return !(node && node->in_other_partition);
	}
      return false;

    case VAR_DECL:
      /* Assume that accesses to weak vars may trap, unless we know
	 they are certainly defined in current TU or in some other
	 LTO partition.  */
      if (DECL_WEAK (expr) && !DECL_COMDAT (expr) && DECL_EXTERNAL (expr))
	{
	  varpool_node *node = varpool_node::get (expr);
	  if (node)
	    node = node->ultimate_alias_target ();
	  return !(node && node->in_other_partition);
	}
      return false;

    default:
      return false;
    }
}
/* Return non-NULL if there is an integer operation with trapping overflow
   we can rewrite into non-trapping.  Called via walk_tree from
   rewrite_to_non_trapping_overflow.  */

static tree
find_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
{
  if (EXPR_P (*tp)
      && ANY_INTEGRAL_TYPE_P (TREE_TYPE (*tp))
      && !operation_no_trapping_overflow (TREE_TYPE (*tp), TREE_CODE (*tp)))
    return *tp;
  if (IS_TYPE_OR_DECL_P (*tp)
      || (TREE_CODE (*tp) == SAVE_EXPR && data == NULL))
    *walk_subtrees = 0;
  return NULL_TREE;
}
/* Rewrite selected operations into unsigned arithmetics, so that they
   don't trap on overflow.  */

static tree
replace_trapping_overflow (tree *tp, int *walk_subtrees, void *data)
{
  if (find_trapping_overflow (tp, walk_subtrees, data))
    {
      tree type = TREE_TYPE (*tp);
      tree utype = unsigned_type_for (type);
      *walk_subtrees = 0;
      int len = TREE_OPERAND_LENGTH (*tp);
      for (int i = 0; i < len; ++i)
	walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
		   data, (hash_set<tree> *) data);

      if (TREE_CODE (*tp) == ABS_EXPR)
	{
	  TREE_SET_CODE (*tp, ABSU_EXPR);
	  TREE_TYPE (*tp) = utype;
	  *tp = fold_convert (type, *tp);
	}
      else
	{
	  TREE_TYPE (*tp) = utype;
	  len = TREE_OPERAND_LENGTH (*tp);
	  for (int i = 0; i < len; ++i)
	    TREE_OPERAND (*tp, i)
	      = fold_convert (utype, TREE_OPERAND (*tp, i));
	  *tp = fold_convert (type, *tp);
	}
    }
  return NULL_TREE;
}
/* If any subexpression of EXPR can trap due to -ftrapv, rewrite it
   using unsigned arithmetics to avoid traps in it.  */

tree
rewrite_to_non_trapping_overflow (tree expr)
{
  if (!flag_trapv)
    return expr;
  hash_set<tree> pset;
  if (!walk_tree (&expr, find_trapping_overflow, &pset, &pset))
    return expr;
  expr = unshare_expr (expr);
  pset.empty ();
  walk_tree (&expr, replace_trapping_overflow, &pset, &pset);
  return expr;
}
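/* Editor's note (illustration, not GCC source): at the source level the
   rewrite above turns, for signed int A and B compiled with -ftrapv,

       a + b                                     (traps on overflow)

   into the non-trapping, wrap-around equivalent

       (int) ((unsigned int) a + (unsigned int) b)

   and an ABS_EXPR becomes an ABSU_EXPR computed in the unsigned type,
   converted back to the original signed type.  */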
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
   an assignment or a conditional) may throw.  */

static bool
stmt_could_throw_1_p (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t;
  size_t i;
  bool handled, ret;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      if (TREE_CODE_CLASS (code) == tcc_comparison)
	t = TREE_TYPE (gimple_assign_rhs1 (stmt));
      else
	t = gimple_expr_type (stmt);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

  /* First check the LHS.  */
  if (tree_could_trap_p (gimple_assign_lhs (stmt)))
    return true;

  /* Check if the main expression may trap.  */
  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
				       honor_nans, honor_snans,
				       gimple_assign_rhs2 (stmt),
				       &handled);
  if (handled)
    return ret;

  /* If the expression does not trap, see if any of the individual operands
     may trap.  */
  for (i = 1; i < gimple_num_ops (stmt); i++)
    if (tree_could_trap_p (gimple_op (stmt, i)))
      return true;

  return false;
}
/* Return true if statement STMT within FUN could throw an exception.  */

bool
stmt_could_throw_p (function *fun, gimple *stmt)
{
  if (!flag_exceptions)
    return false;

  /* The only statements that can throw an exception are assignments,
     conditionals, calls, resx, and asms.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_RESX:
      return true;

    case GIMPLE_CALL:
      return !gimple_call_nothrow_p (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      {
	if (fun && !fun->can_throw_non_call_exceptions)
	  return false;
	gcond *cond = as_a <gcond *> (stmt);
	tree lhs = gimple_cond_lhs (cond);
	return operation_could_trap_p (gimple_cond_code (cond),
				       FLOAT_TYPE_P (TREE_TYPE (lhs)),
				       false, NULL_TREE);
      }

    case GIMPLE_ASSIGN:
      if ((fun && !fun->can_throw_non_call_exceptions)
	  || gimple_clobber_p (stmt))
	return false;
      return stmt_could_throw_1_p (as_a <gassign *> (stmt));

    case GIMPLE_ASM:
      if (fun && !fun->can_throw_non_call_exceptions)
	return false;
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));

    default:
      return false;
    }
}
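/* Editor's sketch (not GCC source): the predicate above is the usual guard
   before consulting the EH table; the LP number semantics are those
   documented at lookup_stmt_eh_lp_fn.  */
#if 0
  if (stmt_could_throw_p (cfun, stmt))
    {
      int lp_nr = lookup_stmt_eh_lp (stmt);
      /* lp_nr > 0: STMT throws to landing pad LP_NR.
	 lp_nr < 0: STMT lies in the MUST_NOT_THROW region -LP_NR.
	 lp_nr == 0: any exception escapes this function.  */
    }
#endif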
/* Return true if expression T could throw an exception.  */

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (cfun->can_throw_non_call_exceptions
	  && tree_could_trap_p (TREE_OPERAND (t, 0)))
	return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (cfun->can_throw_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
/* Return true if STMT can throw an exception that is not caught within its
   function FUN.  FUN can be NULL but the function is extra conservative
   then.  */

bool
stmt_can_throw_external (function *fun, gimple *stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (fun, stmt))
    return false;
  if (!fun)
    return true;

  lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
  return lp_nr == 0;
}

/* Return true if STMT can throw an exception that is caught within its
   function FUN.  */

bool
stmt_can_throw_internal (function *fun, gimple *stmt)
{
  int lp_nr;

  gcc_checking_assert (fun);
  if (!stmt_could_throw_p (fun, stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp_fn (fun, stmt);
  return lp_nr > 0;
}
/* Given a statement STMT in IFUN, if STMT can no longer throw, then
   remove any entry it might have from the EH table.  Return true if
   any change was made.  */

bool
maybe_clean_eh_stmt_fn (struct function *ifun, gimple *stmt)
{
  if (stmt_could_throw_p (ifun, stmt))
    return false;
  return remove_stmt_from_eh_lp_fn (ifun, stmt);
}

/* Likewise, but always use the current function.  */

bool
maybe_clean_eh_stmt (gimple *stmt)
{
  return maybe_clean_eh_stmt_fn (cfun, stmt);
}
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */

bool
maybe_clean_or_replace_eh_stmt (gimple *old_stmt, gimple *new_stmt)
{
  int lp_nr = lookup_stmt_eh_lp (old_stmt);

  if (lp_nr != 0)
    {
      bool new_stmt_could_throw = stmt_could_throw_p (cfun, new_stmt);

      if (new_stmt == old_stmt && new_stmt_could_throw)
	return false;

      remove_stmt_from_eh_lp (old_stmt);
      if (new_stmt_could_throw)
	{
	  add_stmt_to_eh_lp (new_stmt, lp_nr);
	  return false;
	}
      else
	return true;
    }

  return false;
}
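/* Editor's sketch (not GCC source): typical caller pattern when a pass
   replaces OLD_STMT by NEW_STMT in place.  A TRUE return means some EH
   edges may now be dead; gimple_purge_dead_eh_edges does that cleanup.
   All names here are hypothetical.  */
#if 0
  gsi_replace (&gsi, new_stmt, false);
  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)
      && gimple_purge_dead_eh_edges (gimple_bb (new_stmt)))
    cfg_changed = true;
#endif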
/* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
   in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
   operand is the return value of duplicate_eh_regions.  */

bool
maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple *new_stmt,
			    struct function *old_fun, gimple *old_stmt,
			    hash_map<void *, void *> *map,
			    int default_lp_nr)
{
  int old_lp_nr, new_lp_nr;

  if (!stmt_could_throw_p (new_fun, new_stmt))
    return false;

  old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
  if (old_lp_nr == 0)
    {
      if (default_lp_nr == 0)
	return false;
      new_lp_nr = default_lp_nr;
    }
  else if (old_lp_nr > 0)
    {
      eh_landing_pad old_lp, new_lp;

      old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
      new_lp = static_cast<eh_landing_pad> (*map->get (old_lp));
      new_lp_nr = new_lp->index;
    }
  else
    {
      eh_region old_r, new_r;

      old_r = (*old_fun->eh->region_array)[-old_lp_nr];
      new_r = static_cast<eh_region> (*map->get (old_r));
      new_lp_nr = -new_r->index;
    }

  add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
  return true;
}

/* Similar, but both OLD_STMT and NEW_STMT are within the current function,
   and thus no remapping is required.  */

bool
maybe_duplicate_eh_stmt (gimple *new_stmt, gimple *old_stmt)
{
  int lp_nr;

  if (!stmt_could_throw_p (cfun, new_stmt))
    return false;

  lp_nr = lookup_stmt_eh_lp (old_stmt);
  if (lp_nr == 0)
    return false;

  add_stmt_to_eh_lp (new_stmt, lp_nr);
  return true;
}
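/* Editor's sketch (not GCC source): when a pass duplicates a throwing
   statement within the current function, the copy inherits the original's
   EH table entry like this; COPY is hypothetical.  */
#if 0
  gimple *copy = gimple_copy (stmt);
  /* ... insert COPY somewhere within the same EH region ...  */
  maybe_duplicate_eh_stmt (copy, stmt);
#endif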
/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
   this only handles handlers consisting of a single call, as that's the
   important case for C++: a destructor call for a particular object showing
   up in multiple handlers.  */

static bool
same_handler_p (gimple_seq oneh, gimple_seq twoh)
{
  gimple_stmt_iterator gsi;
  gimple *ones, *twos;
  unsigned int ai;

  gsi = gsi_start (oneh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  ones = gsi_stmt (gsi);

  gsi = gsi_start (twoh);
  if (!gsi_one_before_end_p (gsi))
    return false;
  twos = gsi_stmt (gsi);

  if (!is_gimple_call (ones)
      || !is_gimple_call (twos)
      || gimple_call_lhs (ones)
      || gimple_call_lhs (twos)
      || gimple_call_chain (ones)
      || gimple_call_chain (twos)
      || !gimple_call_same_target_p (ones, twos)
      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
    return false;

  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
    if (!operand_equal_p (gimple_call_arg (ones, ai),
			  gimple_call_arg (twos, ai), 0))
      return false;

  return true;
}
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   to
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }

   This occurs frequently in C++, where A is a local variable and B is a
   temporary used in the initializer for A.  */

static void
optimize_double_finally (gtry *one, gtry *two)
{
  gimple *oneh;
  gimple_stmt_iterator gsi;
  gimple_seq cleanup;

  cleanup = gimple_try_cleanup (one);
  gsi = gsi_start (cleanup);
  if (!gsi_one_before_end_p (gsi))
    return;

  oneh = gsi_stmt (gsi);
  if (gimple_code (oneh) != GIMPLE_TRY
      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
    return;

  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
    {
      gimple_seq seq = gimple_try_eval (oneh);

      gimple_try_set_cleanup (one, seq);
      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
      seq = copy_gimple_seq_and_replace_locals (seq);
      gimple_seq_add_seq (&seq, gimple_try_eval (two));
      gimple_try_set_eval (two, seq);
    }
}
/* Perform EH refactoring optimizations that are simpler to do when code
   flow has been lowered but EH structures haven't.  */

static void
refactor_eh_r (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  gimple *one, *two;

  one = NULL;
  two = NULL;
  gsi = gsi_start (seq);
  while (1)
    {
      one = two;
      if (gsi_end_p (gsi))
	two = NULL;
      else
	two = gsi_stmt (gsi);
      if (one && two)
	if (gtry *try_one = dyn_cast <gtry *> (one))
	  if (gtry *try_two = dyn_cast <gtry *> (two))
	    if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
		&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
	      optimize_double_finally (try_one, try_two);
      if (one)
	switch (gimple_code (one))
	  {
	  case GIMPLE_TRY:
	    refactor_eh_r (gimple_try_eval (one));
	    refactor_eh_r (gimple_try_cleanup (one));
	    break;
	  case GIMPLE_CATCH:
	    refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
	    break;
	  case GIMPLE_EH_FILTER:
	    refactor_eh_r (gimple_eh_filter_failure (one));
	    break;
	  case GIMPLE_EH_ELSE:
	    {
	      geh_else *eh_else_stmt = as_a <geh_else *> (one);
	      refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
	      refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
	    }
	    break;
	  default:
	    break;
	  }
      if (two)
	gsi_next (&gsi);
      else
	break;
    }
}
namespace {

const pass_data pass_data_refactor_eh =
{
  GIMPLE_PASS, /* type */
  "ehopt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_refactor_eh : public gimple_opt_pass
{
public:
  pass_refactor_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_refactor_eh, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *)
    {
      refactor_eh_r (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_refactor_eh

} // anon namespace

gimple_opt_pass *
make_pass_refactor_eh (gcc::context *ctxt)
{
  return new pass_refactor_eh (ctxt);
}
/* At the end of gimple optimization, we can lower RESX.  */

static bool
lower_resx (basic_block bb, gresx *stmt,
	    hash_map<eh_region, tree> *mnt_map)
{
  int lp_nr;
  eh_region src_r, dst_r;
  gimple_stmt_iterator gsi;
  gimple *x;
  tree fn, src_nr;
  bool ret = false;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr != 0)
    dst_r = get_eh_region_from_lp_number (lp_nr);
  else
    dst_r = NULL;

  src_r = get_eh_region_from_number (gimple_resx_region (stmt));
  gsi = gsi_last_bb (bb);

  if (src_r == NULL)
    {
      /* We can wind up with no source region when pass_cleanup_eh shows
	 that there are no entries into an eh region and deletes it, but
	 then the block that contains the resx isn't removed.  This can
	 happen without optimization when the switch statement created by
	 lower_try_finally_switch isn't simplified to remove the eh case.

	 Resolve this by expanding the resx node to an abort.  */

      fn = builtin_decl_implicit (BUILT_IN_TRAP);
      x = gimple_build_call (fn, 0);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      while (EDGE_COUNT (bb->succs) > 0)
	remove_edge (EDGE_SUCC (bb, 0));
    }
  else if (dst_r)
    {
      /* When we have a destination region, we resolve this by copying
	 the exception pointer and filter values into place, and changing
	 the edge to immediately after the landing pad.  */
      edge e;

      if (lp_nr < 0)
	{
	  basic_block new_bb;
	  tree lab;

	  /* We are resuming into a MUST_NOT_THROW region.  Expand a call to
	     the failure decl into a new block, if needed.  */
	  gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);

	  tree *slot = mnt_map->get (dst_r);
	  if (slot == NULL)
	    {
	      gimple_stmt_iterator gsi2;

	      new_bb = create_empty_bb (bb);
	      new_bb->count = bb->count;
	      add_bb_to_loop (new_bb, bb->loop_father);
	      lab = gimple_block_label (new_bb);
	      gsi2 = gsi_start_bb (new_bb);

	      fn = dst_r->u.must_not_throw.failure_decl;
	      x = gimple_build_call (fn, 0);
	      gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
	      gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);

	      mnt_map->put (dst_r, lab);
	    }
	  else
	    {
	      lab = *slot;
	      new_bb = label_to_block (cfun, lab);
	    }

	  gcc_assert (EDGE_COUNT (bb->succs) == 0);
	  e = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
	}
      else
	{
	  edge_iterator ei;
	  tree dst_nr = build_int_cst (integer_type_node, dst_r->index);

	  fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 2, dst_nr, src_nr);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* Update the flags for the outgoing edge.  */
	  e = single_succ_edge (bb);
	  gcc_assert (e->flags & EDGE_EH);
	  e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
	  e->probability = profile_probability::always ();

	  /* If there are no more EH users of the landing pad, delete it.  */
	  FOR_EACH_EDGE (e, ei, e->dest->preds)
	    if (e->flags & EDGE_EH)
	      break;
	  if (e == NULL)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      remove_eh_landing_pad (lp);
	    }
	}

      ret = true;
    }
  else
    {
      tree var;

      /* When we don't have a destination region, this exception escapes
	 up the call chain.  We resolve this by generating a call to the
	 _Unwind_Resume library function.  */

      /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
	 with no arguments for C++.  Check for that.  */
      if (src_r->use_cxa_end_cleanup)
	{
	  fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
	  x = gimple_build_call (fn, 0);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}
      else
	{
	  fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
	  src_nr = build_int_cst (integer_type_node, src_r->index);
	  x = gimple_build_call (fn, 1, src_nr);
	  var = create_tmp_var (ptr_type_node);
	  var = make_ssa_name (var, x);
	  gimple_call_set_lhs (x, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	  /* When exception handling is delegated to a caller function, we
	     have to guarantee that shadow memory variables living on stack
	     will be cleaned before control is given to a parent function.  */
	  if (sanitize_flags_p (SANITIZE_ADDRESS))
	    {
	      tree decl
		= builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
	      gimple *g = gimple_build_call (decl, 0);
	      gimple_set_location (g, gimple_location (stmt));
	      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
	    }

	  fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
	  x = gimple_build_call (fn, 1, var);
	  gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	}

      gcc_assert (EDGE_COUNT (bb->succs) == 0);
    }

  gsi_remove (&gsi, true);

  return ret;
}
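/* Editor's note (illustration, not GCC source): for the escaping case
   above, a "resx 1" whose region has no enclosing handler in this
   function is expanded to roughly the following GIMPLE (SSA names
   hypothetical):

       _1 = __builtin_eh_pointer (1);
       __builtin_unwind_resume (_1);

   or to a single __builtin_cxa_end_cleanup () call when the ARM EABI
   __cxa_end_cleanup variant is in use.  */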
namespace {

const pass_data pass_data_lower_resx =
{
  GIMPLE_PASS, /* type */
  "resx", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_resx : public gimple_opt_pass
{
public:
  pass_lower_resx (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_resx, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_exceptions != 0; }
  virtual unsigned int execute (function *);

}; // class pass_lower_resx

unsigned
pass_lower_resx::execute (function *fun)
{
  basic_block bb;
  bool dominance_invalidated = false;
  bool any_rewritten = false;

  hash_map<eh_region, tree> mnt_map;

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *last = last_stmt (bb);
      if (last && is_gimple_resx (last))
	{
	  dominance_invalidated |=
	    lower_resx (bb, as_a <gresx *> (last), &mnt_map);
	  any_rewritten = true;
	}
    }

  if (dominance_invalidated)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
    }

  return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_resx (gcc::context *ctxt)
{
  return new pass_lower_resx (ctxt);
}
/* Try to optimize var = {v} {CLOBBER} stmts followed just by
   external throw.  */

static void
optimize_clobbers (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  bool any_clobbers = false;
  bool seen_stack_restore = false;
  edge_iterator ei;
  edge e;

  /* Only optimize anything if the bb contains at least one clobber,
     ends with resx (checked by caller), optionally contains some
     debug stmts or labels, or at most one __builtin_stack_restore
     call, and has an incoming EH edge.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_clobber_p (stmt))
	{
	  any_clobbers = true;
	  continue;
	}
      if (!seen_stack_restore
	  && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
	{
	  seen_stack_restore = true;
	  continue;
	}
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      return;
    }
  if (!any_clobbers)
    return;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EH)
      break;
  if (e == NULL)
    return;
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (!gimple_clobber_p (stmt))
	continue;
      unlink_stmt_vdef (stmt);
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}
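/* Editor's note (illustration, not GCC source): the case handled above is
   a landing-pad block such as

       <bb 7>:                      (all interesting preds are EH edges)
       x = {v} {CLOBBER};
       y = {v} {CLOBBER};
       resx 2;                      (throws externally)

   The clobbers only mark storage dead on a path that leaves the function,
   so they are deleted outright.  */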
/* Try to sink var = {v} {CLOBBER} stmts followed just by
   internal throw to successor BB.  */

static int
sink_clobbers (basic_block bb)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi, dgsi;
  basic_block succbb;
  bool any_clobbers = false;
  unsigned todo = 0;

  /* Only optimize if BB has a single EH successor and
     all predecessor edges are EH too.  */
  if (!single_succ_p (bb)
      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
    return 0;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if ((e->flags & EDGE_EH) == 0)
	return 0;
    }

  /* And BB contains only CLOBBER stmts before the final
     RESX.  */
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      if (!gimple_clobber_p (stmt))
	return 0;
      any_clobbers = true;
    }
  if (!any_clobbers)
    return 0;

  edge succe = single_succ_edge (bb);
  succbb = succe->dest;

  /* See if there is a virtual PHI node to take an updated virtual
     operand from.  */
  gphi *vphi = NULL;
  tree vuse = NULL_TREE;
  for (gphi_iterator gpi = gsi_start_phis (succbb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      tree res = gimple_phi_result (gpi.phi ());
      if (virtual_operand_p (res))
	{
	  vphi = gpi.phi ();
	  vuse = res;
	  break;
	}
    }

  dgsi = gsi_after_labels (succbb);
  gsi = gsi_last_bb (bb);
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree lhs;
      if (is_gimple_debug (stmt))
	continue;
      if (gimple_code (stmt) == GIMPLE_LABEL)
	break;
      lhs = gimple_assign_lhs (stmt);
      /* Unfortunately we don't have dominance info updated at this
	 point, so checking if
	 dominated_by_p (CDI_DOMINATORS, succbb,
			 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
	 would be too costly.  Thus, avoid sinking any clobbers that
	 refer to non-(D) SSA_NAMEs.  */
      if (TREE_CODE (lhs) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (&gsi, true);
	  release_defs (stmt);
	  continue;
	}

      /* As we do not change stmt order when sinking across a
	 forwarder edge we can keep virtual operands in place.  */
      gsi_remove (&gsi, false);
      gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);

      /* But adjust virtual operands if we sunk across a PHI node.  */
      if (vuse)
	{
	  gimple *use_stmt;
	  imm_use_iterator iter;
	  use_operand_p use_p;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, gimple_vdef (stmt));
	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
	    {
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
	      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
	    }
	  /* Adjust the incoming virtual operand.  */
	  SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
	  SET_USE (gimple_vuse_op (stmt), vuse);
	}
      /* If there isn't a single predecessor but no virtual PHI node
	 arrange for virtual operands to be renamed.  */
      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
	       && !single_pred_p (succbb))
	{
	  /* In this case there will be no use of the VDEF of this stmt.
	     ??? Unless this is a secondary opportunity and we have not
	     removed unreachable blocks yet, so we cannot assert this.
	     Which also means we will end up renaming too many times.  */
	  SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
	  mark_virtual_operands_for_renaming (cfun);
	  todo |= TODO_update_ssa_only_virtuals;
	}
    }

  return todo;
}
/* At the end of inlining, we can lower EH_DISPATCH.  Return true when
   we have found some duplicate labels and removed some edges.  */

static bool
lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
{
  gimple_stmt_iterator gsi;
  int region_nr;
  eh_region r;
  tree filter, fn;
  gimple *x;
  bool redirected = false;

  region_nr = gimple_eh_dispatch_region (stmt);
  r = get_eh_region_from_number (region_nr);

  gsi = gsi_last_bb (src);

  switch (r->type)
    {
    case ERT_TRY:
      {
	auto_vec<tree> labels;
	tree default_label = NULL;
	eh_catch c;
	edge_iterator ei;
	edge e;
	hash_set<tree> seen_values;

	/* Collect the labels for a switch.  Zero the post_landing_pad
	   field because we'll no longer have anything keeping these labels
	   in existence and the optimizer will be free to merge these
	   blocks at will.  */
	for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	  {
	    tree tp_node, flt_node, lab = c->label;
	    bool have_label = false;

	    c->label = NULL;
	    tp_node = c->type_list;
	    flt_node = c->filter_list;

	    if (tp_node == NULL)
	      {
		default_label = lab;
		break;
	      }
	    do
	      {
		/* Filter out duplicate labels that arise when this handler
		   is shadowed by an earlier one.  When no labels are
		   attached to the handler anymore, we remove
		   the corresponding edge and then we delete unreachable
		   blocks at the end of this pass.  */
		if (! seen_values.contains (TREE_VALUE (flt_node)))
		  {
		    tree t = build_case_label (TREE_VALUE (flt_node),
					       NULL, lab);
		    labels.safe_push (t);
		    seen_values.add (TREE_VALUE (flt_node));
		    have_label = true;
		  }

		tp_node = TREE_CHAIN (tp_node);
		flt_node = TREE_CHAIN (flt_node);
	      }
	    while (tp_node);
	    if (! have_label)
	      {
		remove_edge (find_edge (src, label_to_block (cfun, lab)));
		redirected = true;
	      }
	  }

	/* Clean up the edge flags.  */
	FOR_EACH_EDGE (e, ei, src->succs)
	  {
	    if (e->flags & EDGE_FALLTHRU)
	      {
		/* If there was no catch-all, use the fallthru edge.  */
		if (default_label == NULL)
		  default_label = gimple_block_label (e->dest);
		e->flags &= ~EDGE_FALLTHRU;
	      }
	  }
	gcc_assert (default_label != NULL);

	/* Don't generate a switch if there's only a default case.
	   This is common in the form of try { A; } catch (...) { B; }.  */
	if (!labels.exists ())
	  {
	    e = single_succ_edge (src);
	    e->flags |= EDGE_FALLTHRU;
	  }
	else
	  {
	    fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	    x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
							 region_nr));
	    filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	    filter = make_ssa_name (filter, x);
	    gimple_call_set_lhs (x, filter);
	    gimple_set_location (x, gimple_location (stmt));
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	    /* Turn the default label into a default case.  */
	    default_label = build_case_label (NULL, NULL, default_label);
	    sort_case_labels (labels);

	    x = gimple_build_switch (filter, default_label, labels);
	    gimple_set_location (x, gimple_location (stmt));
	    gsi_insert_before (&gsi, x, GSI_SAME_STMT);
	  }
      }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      {
	edge b_e = BRANCH_EDGE (src);
	edge f_e = FALLTHRU_EDGE (src);

	fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
	x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
						     region_nr));
	filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
	filter = make_ssa_name (filter, x);
	gimple_call_set_lhs (x, filter);
	gimple_set_location (x, gimple_location (stmt));
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	r->u.allowed.label = NULL;
	x = gimple_build_cond (EQ_EXPR, filter,
			       build_int_cst (TREE_TYPE (filter),
					      r->u.allowed.filter),
			       NULL_TREE, NULL_TREE);
	gsi_insert_before (&gsi, x, GSI_SAME_STMT);

	b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
	f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Replace the EH_DISPATCH with the SWITCH or COND generated above.  */
  gsi_remove (&gsi, true);
  return redirected;
}
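/* Editor's note (illustration, not GCC source): for an ERT_TRY region with
   two typed handlers, the lowering above turns

       eh_dispatch 1;

   into roughly (filter values and labels hypothetical)

       filter_1 = __builtin_eh_filter (1);
       switch (filter_1) <default: L0, case 1: L1, case 2: L2>

   while a region with only a catch-all keeps the plain fallthru edge.  */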
namespace {

const pass_data pass_data_lower_eh_dispatch =
{
  GIMPLE_PASS, /* type */
  "ehdisp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh_dispatch : public gimple_opt_pass
{
public:
  pass_lower_eh_dispatch (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
  virtual unsigned int execute (function *);

}; // class pass_lower_eh_dispatch

unsigned
pass_lower_eh_dispatch::execute (function *fun)
{
  basic_block bb;
  int flags = 0;
  bool redirected = false;

  assign_filter_values ();

  FOR_EACH_BB_FN (bb, fun)
    {
      gimple *last = last_stmt (bb);
      if (last == NULL)
	continue;
      if (gimple_code (last) == GIMPLE_EH_DISPATCH)
	{
	  redirected |= lower_eh_dispatch (bb,
					   as_a <geh_dispatch *> (last));
	  flags |= TODO_update_ssa_only_virtuals;
	}
      else if (gimple_code (last) == GIMPLE_RESX)
	{
	  if (stmt_can_throw_external (cfun, last))
	    optimize_clobbers (bb);
	  else
	    flags |= sink_clobbers (bb);
	}
    }

  if (redirected)
    {
      free_dominance_info (CDI_DOMINATORS);
      delete_unreachable_blocks ();
    }
  return flags;
}

} // anon namespace

gimple_opt_pass *
make_pass_lower_eh_dispatch (gcc::context *ctxt)
{
  return new pass_lower_eh_dispatch (ctxt);
}
/* Walk statements, see what regions and, optionally, landing pads
   are really referenced.

   Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
   and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.

   Passing NULL for LP_REACHABLE is valid, in this case only reachable
   regions are marked.

   The caller is responsible for freeing the returned sbitmaps.  */

static void
mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
{
  sbitmap r_reachable, lp_reachable;
  basic_block bb;
  bool mark_landing_pads = (lp_reachablep != NULL);
  gcc_checking_assert (r_reachablep != NULL);

  r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
  bitmap_clear (r_reachable);
  *r_reachablep = r_reachable;

  if (mark_landing_pads)
    {
      lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
      bitmap_clear (lp_reachable);
      *lp_reachablep = lp_reachable;
    }
  else
    lp_reachable = NULL;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (mark_landing_pads)
	    {
	      int lp_nr = lookup_stmt_eh_lp (stmt);

	      /* Negative LP numbers are MUST_NOT_THROW regions which
		 are not considered BB enders.  */
	      if (lp_nr < 0)
		bitmap_set_bit (r_reachable, -lp_nr);

	      /* Positive LP numbers are real landing pads, and BB enders.  */
	      else if (lp_nr > 0)
		{
		  gcc_assert (gsi_one_before_end_p (gsi));
		  eh_region region = get_eh_region_from_lp_number (lp_nr);
		  bitmap_set_bit (r_reachable, region->index);
		  bitmap_set_bit (lp_reachable, lp_nr);
		}
	    }

	  /* Avoid removing regions referenced from RESX/EH_DISPATCH.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RESX:
	      bitmap_set_bit (r_reachable,
			      gimple_resx_region (as_a <gresx *> (stmt)));
	      break;
	    case GIMPLE_EH_DISPATCH:
	      bitmap_set_bit (r_reachable,
			      gimple_eh_dispatch_region (
				as_a <geh_dispatch *> (stmt)));
	      break;
	    case GIMPLE_CALL:
	      if (gimple_call_builtin_p (stmt, BUILT_IN_EH_COPY_VALUES))
		for (int i = 0; i < 2; ++i)
		  {
		    tree rt = gimple_call_arg (stmt, i);
		    HOST_WIDE_INT ri = tree_to_shwi (rt);

		    gcc_assert (ri == (int)ri);
		    bitmap_set_bit (r_reachable, ri);
		  }
	      break;
	    default:
	      break;
	    }
	}
    }
}
/* Remove unreachable handlers and unreachable landing pads.  */

static void
remove_unreachable_handlers (void)
{
  sbitmap r_reachable, lp_reachable;
  eh_region region;
  eh_landing_pad lp;
  unsigned i;

  mark_reachable_handlers (&r_reachable, &lp_reachable);

  if (dump_file)
    {
      fprintf (dump_file, "Before removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Reachable regions: ");
      dump_bitmap_file (dump_file, r_reachable);
      fprintf (dump_file, "Reachable landing pads: ");
      dump_bitmap_file (dump_file, lp_reachable);
    }

  if (dump_file)
    {
      FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
	if (region && !bitmap_bit_p (r_reachable, region->index))
	  fprintf (dump_file,
		   "Removing unreachable region %d\n",
		   region->index);
    }

  remove_unreachable_eh_regions (r_reachable);

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp && !bitmap_bit_p (lp_reachable, lp->index))
      {
	if (dump_file)
	  fprintf (dump_file,
		   "Removing unreachable landing pad %d\n",
		   lp->index);
	remove_eh_landing_pad (lp);
      }

  if (dump_file)
    {
      fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "\n\n");
    }

  sbitmap_free (r_reachable);
  sbitmap_free (lp_reachable);

  if (flag_checking)
    verify_eh_tree (cfun);
}
/* Remove unreachable handlers if any landing pads have been removed after
   last ehcleanup pass (due to gimple_purge_dead_eh_edges).  */

void
maybe_remove_unreachable_handlers (void)
{
  eh_landing_pad lp;
  unsigned i;

  if (cfun->eh == NULL)
    return;

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
    if (lp
	&& (lp->post_landing_pad == NULL_TREE
	    || label_to_block (cfun, lp->post_landing_pad) == NULL))
      {
	remove_unreachable_handlers ();
	return;
      }
}
/* Remove regions that do not have landing pads.  This assumes
   that remove_unreachable_handlers has already been run, and
   that we've just manipulated the landing pads since then.

   Preserve regions with landing pads and regions that prevent
   exceptions from propagating further, even if these regions
   are not reachable.  */

static void
remove_unreachable_handlers_no_lp (void)
{
  eh_region region;
  sbitmap r_reachable;
  unsigned i;

  mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);

  FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
    {
      if (! region)
	continue;

      if (region->landing_pads != NULL
	  || region->type == ERT_MUST_NOT_THROW)
	bitmap_set_bit (r_reachable, region->index);

      if (dump_file
	  && !bitmap_bit_p (r_reachable, region->index))
	fprintf (dump_file,
		 "Removing unreachable region %d\n",
		 region->index);
    }

  remove_unreachable_eh_regions (r_reachable);

  sbitmap_free (r_reachable);
}
/* Undo critical edge splitting on an EH landing pad.  Earlier, we
   optimistically split all sorts of edges, including EH edges.  The
   optimization passes in between may not have needed them; if not,
   we should undo the split.

   Recognize this case by having one EH edge incoming to the BB and
   one normal edge outgoing; BB should be empty apart from the
   post_landing_pad label.

   Note that this is slightly different from the empty handler case
   handled by cleanup_empty_eh, in that the actual handler may yet
   have actual code but the landing pad has been separated from the
   handler.  As such, cleanup_empty_eh relies on this transformation
   having been done first.  */

static bool
unsplit_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (cfun, lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  edge e_in, e_out;

  /* Quickly check the edge counts on BB for singularity.  */
  if (!single_pred_p (bb) || !single_succ_p (bb))
    return false;
  e_in = single_pred_edge (bb);
  e_out = single_succ_edge (bb);

  /* Input edge must be EH and output edge must be normal.  */
  if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
    return false;

  /* The block must be empty except for the labels and debug insns.  */
  gsi = gsi_after_labels (bb);
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  if (!gsi_end_p (gsi))
    return false;

  /* The destination block must not already have a landing pad
     for a different region.  */
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      tree lab;
      int lp_nr;

      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
	return false;
    }

  /* The new destination block must not already be a destination of
     the source block, lest we merge fallthru and eh edges and get
     all sorts of confused.  */
  if (find_edge (e_in->src, e_out->dest))
    return false;

  /* ??? We can get degenerate phis due to cfg cleanups.  I would have
     thought this should have been cleaned up by a phicprop pass, but
     that doesn't appear to handle virtuals.  Propagate by hand.  */
  if (!gimple_seq_empty_p (phi_nodes (bb)))
    {
      for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
	{
	  gimple *use_stmt;
	  gphi *phi = gpi.phi ();
	  tree lhs = gimple_phi_result (phi);
	  tree rhs = gimple_phi_arg_def (phi, 0);
	  use_operand_p use_p;
	  imm_use_iterator iter;

	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
	    {
	      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		SET_USE (use_p, rhs);
	    }

	  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;

	  remove_phi_node (&gpi, true);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
	     lp->index, e_out->dest->index);

  /* Redirect the edge.  Since redirect_eh_edge_1 expects to be moving
     a successor edge, humor it.  But do the real CFG change with the
     predecessor of E_OUT in order to preserve the ordering of arguments
     to the PHI nodes in E_OUT->DEST.  */
  redirect_eh_edge_1 (e_in, e_out->dest, false);
  redirect_edge_pred (e_out, e_in->src);
  e_out->flags = e_in->flags;
  e_out->probability = e_in->probability;
  remove_edge (e_in);

  return true;
}
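/* Editor's note (illustration, not GCC source): unsplit_eh undoes the
   earlier critical-edge split, collapsing

       throw stmt --EH--> [empty post-landing-pad bb] --fallthru--> handler

   back into

       throw stmt --EH--> handler

   provided the pad block is empty and the handler block carries no
   landing-pad label for a different region.  */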
/* Examine each landing pad block and see if it matches unsplit_eh.  */

static bool
unsplit_all_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= unsplit_eh (lp);

  return changed;
}

/* Wrapper around unsplit_all_eh that makes it usable everywhere.  */

void
unsplit_eh_edges (void)
{
  bool changed;

  /* unsplit_all_eh can die looking up unreachable landing pads.  */
  maybe_remove_unreachable_handlers ();

  changed = unsplit_all_eh ();

  /* If EH edges have been unsplit, delete unreachable forwarder blocks.  */
  if (changed)
    {
      free_dominance_info (CDI_DOMINATORS);
      free_dominance_info (CDI_POST_DOMINATORS);
      delete_unreachable_blocks ();
    }
}
/* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
   to OLD_BB to NEW_BB; return true on success, false on failure.

   OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
   PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
   Virtual PHIs may be deleted and marked for renaming.  */

static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
			     edge old_bb_out, bool change_region)
{
  gphi_iterator ngsi, ogsi;
  edge_iterator ei;
  edge e;
  bitmap ophi_handled;

  /* The destination block must not be a regular successor for any
     of the preds of the landing pad.  Thus, avoid turning a fallthru
     predecessor edge into a duplicate of an existing EH edge,
     which CFG verification would choke on.  See PR45172 and PR51089.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (find_edge (e->src, new_bb))
      return false;

  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);

  ophi_handled = BITMAP_ALLOC (NULL);

  /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
     for the edges we're going to move.  */
  for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
    {
      gphi *ophi, *nphi = ngsi.phi ();
      tree nresult, nop;

      nresult = gimple_phi_result (nphi);
      nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);

      /* Find the corresponding PHI in OLD_BB so we can forward-propagate
	 the source ssa_name.  */
      ophi = NULL;
      for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
	{
	  ophi = ogsi.phi ();
	  if (gimple_phi_result (ophi) == nop)
	    break;
	  ophi = NULL;
	}

      /* If we did find the corresponding PHI, copy those inputs.  */
      if (ophi)
	{
	  /* If NOP is used somewhere else beyond phis in new_bb, give up.  */
	  if (!has_single_use (nop))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
		{
		  if (!gimple_debug_bind_p (USE_STMT (use_p))
		      && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
			  || gimple_bb (USE_STMT (use_p)) != new_bb))
		    goto fail;
		}
	    }
	  bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    {
	      location_t oloc;
	      tree oop;

	      if ((e->flags & EDGE_EH) == 0)
		continue;
	      oop = gimple_phi_arg_def (ophi, e->dest_idx);
	      oloc = gimple_phi_arg_location (ophi, e->dest_idx);
	      redirect_edge_var_map_add (e, nresult, oop, oloc);
	    }
	}
      /* If we didn't find the PHI, if it's a real variable or a VOP, we know
	 from the fact that OLD_BB is tree_empty_eh_handler_p that the
	 variable is unchanged from input to the block and we can simply
	 re-use the input to NEW_BB from the OLD_BB_OUT edge.  */
      else
	{
	  location_t nloc
	    = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
	  FOR_EACH_EDGE (e, ei, old_bb->preds)
	    redirect_edge_var_map_add (e, nresult, nop, nloc);
	}
    }

  /* Second, verify that all PHIs from OLD_BB have been handled.  If not,
     we don't know what values from the other edges into NEW_BB to use.  */
  for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
    {
      gphi *ophi = ogsi.phi ();
      tree oresult = gimple_phi_result (ophi);
      if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
	goto fail;
    }

  /* Finally, move the edges and update the PHIs.  */
  for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_EH)
      {
	/* ??? CFG manipulation routines do not try to update loop
	   form on edge redirection.  Do so manually here for now.  */
	/* If we redirect a loop entry or latch edge that will either create
	   a multiple entry loop or rotate the loop.  If the loops merge
	   we may have created a loop with multiple latches.
	   All of this isn't easily fixed thus cancel the affected loop
	   and mark the other loop as possibly having multiple latches.  */
	if (e->dest == e->dest->loop_father->header)
	  {
	    mark_loop_for_removal (e->dest->loop_father);
	    new_bb->loop_father->latch = NULL;
	    loops_state_set (LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
	  }
	redirect_eh_edge_1 (e, new_bb, change_region);
	redirect_edge_succ (e, new_bb);
	flush_pending_stmts (e);
      }
    else
      ei_next (&ei);

  BITMAP_FREE (ophi_handled);
  return true;

 fail:
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    redirect_edge_var_map_clear (e);
  BITMAP_FREE (ophi_handled);
  return false;
}
/* A subroutine of cleanup_empty_eh.  Move a landing pad LP from its
   old region to NEW_REGION at BB.  */

static void
cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
			  eh_landing_pad lp, eh_region new_region)
{
  gimple_stmt_iterator gsi;
  eh_landing_pad *pp;

  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
    continue;
  *pp = lp->next_lp;

  lp->region = new_region;
  lp->next_lp = new_region->landing_pads;
  new_region->landing_pads = lp;

  /* Delete the RESX that was matched within the empty handler block.  */
  gsi = gsi_last_bb (bb);
  unlink_stmt_vdef (gsi_stmt (gsi));
  gsi_remove (&gsi, true);

  /* Clean up E_OUT for the fallthru.  */
  e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
  e_out->probability = profile_probability::always ();
}
/* A subroutine of cleanup_empty_eh.  Handle more complex cases of
   unsplitting than unsplit_eh was prepared to handle, e.g. when
   multiple incoming edges and phis are involved.  */

static bool
cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
  gimple_stmt_iterator gsi;
  tree lab;

  /* We really ought not have totally lost everything following
     a landing pad label.  Given that BB is empty, there had better
     be a successor.  */
  gcc_assert (e_out != NULL);

  /* The destination block must not already have a landing pad
     for a different region.  */
  lab = NULL;
  for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      int lp_nr;

      if (!stmt)
	break;
      lab = gimple_label_label (stmt);
      lp_nr = EH_LANDING_PAD_NR (lab);
      if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
	return false;
    }

  /* Attempt to move the PHIs into the successor block.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Unsplit EH landing pad %d to block %i "
		 "(via cleanup_empty_eh).\n",
		 lp->index, e_out->dest->index);
      return true;
    }

  return false;
}
/* Return true if edge E_FIRST is part of an empty infinite loop
   or leads to such a loop through a series of single successor
   empty basic blocks.  */

static bool
infinite_empty_loop_p (edge e_first)
{
  bool inf_loop = false;
  edge e;

  if (e_first->dest == e_first->src)
    return true;

  e_first->src->aux = (void *) 1;
  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
    {
      gimple_stmt_iterator gsi;
      if (e->dest->aux)
	{
	  inf_loop = true;
	  break;
	}
      e->dest->aux = (void *) 1;
      gsi = gsi_after_labels (e->dest);
      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
	break;
    }
  e_first->src->aux = NULL;
  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
    e->dest->aux = NULL;

  return inf_loop;
}
/* Examine the block associated with LP to determine if it's an empty
   handler for its EH region.  If so, attempt to redirect EH edges to
   an outer region.  Return true if the CFG was updated in any way.  This
   is similar to jump forwarding, just across EH edges.  */

static bool
cleanup_empty_eh (eh_landing_pad lp)
{
  basic_block bb = label_to_block (cfun, lp->post_landing_pad);
  gimple_stmt_iterator gsi;
  gimple *resx;
  eh_region new_region;
  edge_iterator ei;
  edge e, e_out;
  bool has_non_eh_pred;
  bool ret = false;
  int new_lp_nr;

  /* There can be zero or one edges out of BB.  This is the quickest test.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      e_out = NULL;
      break;
    case 1:
      e_out = single_succ_edge (bb);
      break;
    default:
      return false;
    }

  gsi = gsi_last_nondebug_bb (bb);
  resx = gsi_stmt (gsi);
  if (resx && is_gimple_resx (resx))
    {
      if (stmt_can_throw_external (cfun, resx))
	optimize_clobbers (bb);
      else if (sink_clobbers (bb))
	ret = true;
    }

  gsi = gsi_after_labels (bb);

  /* Make sure to skip debug statements.  */
  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);

  /* If the block is totally empty, look for more unsplitting cases.  */
  if (gsi_end_p (gsi))
    {
      /* For the degenerate case of an infinite loop bail out.
	 If bb has no successors and is totally empty, which can happen e.g.
	 because of incorrect noreturn attribute, bail out too.  */
      if (e_out == NULL
	  || infinite_empty_loop_p (e_out))
	return ret;

      return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
    }

  /* The block should consist only of a single RESX statement, modulo a
     preceding call to __builtin_stack_restore if there is no outgoing
     edge, since the call can be eliminated in this case.  */
  resx = gsi_stmt (gsi);
  if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
    {
      gsi_next_nondebug (&gsi);
      resx = gsi_stmt (gsi);
    }
  if (!is_gimple_resx (resx))
    return ret;
  gcc_assert (gsi_one_nondebug_before_end_p (gsi));

  /* Determine if there are non-EH edges, or resx edges into the handler.  */
  has_non_eh_pred = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (!(e->flags & EDGE_EH))
      has_non_eh_pred = true;

  /* Find the handler that's outer of the empty handler by looking at
     where the RESX instruction was vectored.  */
  new_lp_nr = lookup_stmt_eh_lp (resx);
  new_region = get_eh_region_from_lp_number (new_lp_nr);

  /* If there's no destination region within the current function,
     redirection is trivial via removing the throwing statements from
     the EH region, removing the EH edges, and allowing the block
     to go unreachable.  */
  if (new_region == NULL)
    {
      gcc_assert (e_out == NULL);
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple *stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* If the destination region is a MUST_NOT_THROW, allow the runtime
     to handle the abort and allow the blocks to go unreachable.  */
  if (new_region->type == ERT_MUST_NOT_THROW)
    {
      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
	if (e->flags & EDGE_EH)
	  {
	    gimple *stmt = last_stmt (e->src);
	    remove_stmt_from_eh_lp (stmt);
	    add_stmt_to_eh_lp (stmt, new_lp_nr);
	    remove_edge (e);
	  }
	else
	  ei_next (&ei);
      goto succeed;
    }

  /* Try to redirect the EH edges and merge the PHIs into the destination
     landing pad block.  If the merge succeeds, we'll already have redirected
     all the EH edges.  The handler itself will go unreachable if there were
     no normal edges.  */
  if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
    goto succeed;

  /* Finally, if all input edges are EH edges, then we can (potentially)
     reduce the number of transfers from the runtime by moving the landing
     pad from the original region to the new region.  This is a win when
     we remove the last CLEANUP region along a particular exception
     propagation path.  Since nothing changes except for the region with
     which the landing pad is associated, the PHI nodes do not need to be
     adjusted at all.  */
  if (!has_non_eh_pred)
    {
      cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
		 lp->index, new_region->index);

      /* ??? The CFG didn't change, but we may have rendered the
	 old EH region unreachable.  Trigger a cleanup there.  */
      return true;
    }

  return ret;

 succeed:
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
  remove_eh_landing_pad (lp);
  return true;
}

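/* A typical (hypothetical) source-level case that produces such an
   empty handler is a C++ cleanup whose destructor ends up doing
   nothing after inlining, e.g.

     struct guard { ~guard () { } };
     void work ();
     void f () { guard g; work (); }

   The landing pad for G's cleanup region then contains no real
   statements, only a RESX rethrowing to the outer region, so the EH
   edges from the call to WORK can be forwarded past it much as jump
   forwarding would forward an empty block.  */
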
/* Do a post-order traversal of the EH region tree.  Examine each
   post_landing_pad block and see if we can eliminate it as empty.  */

static bool
cleanup_all_empty_eh (void)
{
  bool changed = false;
  eh_landing_pad lp;
  int i;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp)
      changed |= cleanup_empty_eh (lp);

  return changed;
}

/* Perform cleanups and lowering of exception handling:
    1) cleanup regions with handlers doing nothing are optimized out
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
    3) info about regions containing instructions, and regions
       reachable via local EH edges, is collected
    4) the EH tree is pruned of regions that are no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
	 Unify those that have the same failure decl and locus.  */

static unsigned int
execute_cleanup_eh_1 (void)
{
  /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
     looking up unreachable landing pads.  */
  remove_unreachable_handlers ();

  /* Watch out for the region tree vanishing due to all unreachable.  */
  if (cfun->eh->region_tree)
    {
      bool changed = false;

      if (optimize)
	changed |= unsplit_all_eh ();
      changed |= cleanup_all_empty_eh ();

      if (changed)
	{
	  free_dominance_info (CDI_DOMINATORS);
	  free_dominance_info (CDI_POST_DOMINATORS);

	  /* We delayed all basic block deletion, as we may have performed
	     cleanups on EH edges while non-EH edges were still present.  */
	  delete_unreachable_blocks ();

	  /* We manipulated the landing pads.  Remove any region that no
	     longer has a landing pad.  */
	  remove_unreachable_handlers_no_lp ();

	  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
	}
    }

  return 0;
}

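/* The dump messages printed by the cleanups above can be inspected by
   running the compiler with the per-pass dump flag, e.g.

     g++ -O2 -fdump-tree-ehcleanup-details t.C

   which writes one dump file per "ehcleanup" pass instance containing
   the "Empty EH handler %i removed" and "Unsplit EH landing pad"
   notes.  */
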
namespace {

const pass_data pass_data_cleanup_eh =
{
  GIMPLE_PASS, /* type */
  "ehcleanup", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_cleanup_eh : public gimple_opt_pass
{
public:
  pass_cleanup_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
  virtual bool gate (function *fun)
    {
      return fun->eh != NULL && fun->eh->region_tree != NULL;
    }

  virtual unsigned int execute (function *);

}; // class pass_cleanup_eh

unsigned int
pass_cleanup_eh::execute (function *fun)
{
  int ret = execute_cleanup_eh_1 ();

  /* If the function no longer needs an EH personality routine
     clear it.  This exposes cross-language inlining opportunities
     and avoids references to a never defined personality routine.  */
  if (DECL_FUNCTION_PERSONALITY (current_function_decl)
      && function_needs_eh_personality (fun) != eh_personality_lang)
    DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;

  return ret;
}

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_eh (gcc::context *ctxt)
{
  return new pass_cleanup_eh (ctxt);
}

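/* make_pass_cleanup_eh is the factory the pass manager uses to
   instantiate the pass from its listing in passes.def, along the
   lines of

     NEXT_PASS (pass_cleanup_eh);

   while the gate above keeps the pass from running in functions that
   have no EH region tree at all.  */
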
/* Disable warnings about missing quoting in GCC diagnostics for
   the verification errors.  Their format strings don't follow GCC
   diagnostic conventions but are only used for debugging.  */
#if __GNUC__ >= 10
#  pragma GCC diagnostic push
#  pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Verify that BB, which contains STMT as its last statement, has
   precisely the edges that make_eh_edges would create.  */

DEBUG_FUNCTION bool
verify_eh_edges (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  eh_landing_pad lp = NULL;
  int lp_nr;
  edge_iterator ei;
  edge e, eh_edge;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr > 0)
    lp = get_eh_landing_pad_from_number (lp_nr);

  eh_edge = NULL;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_EH)
	{
	  if (eh_edge)
	    {
	      error ("BB %i has multiple EH edges", bb->index);
	      return true;
	    }
	  else
	    eh_edge = e;
	}
    }

  if (lp == NULL)
    {
      if (eh_edge)
	{
	  error ("BB %i cannot throw but has an EH edge", bb->index);
	  return true;
	}
      return false;
    }

  if (!stmt_could_throw_p (cfun, stmt))
    {
      error ("BB %i last statement has incorrectly set lp", bb->index);
      return true;
    }

  if (eh_edge == NULL)
    {
      error ("BB %i is missing an EH edge", bb->index);
      return true;
    }

  if (eh_edge->dest != label_to_block (cfun, lp->post_landing_pad))
    {
      error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
      return true;
    }

  return false;
}

/* Similarly, but handle GIMPLE_EH_DISPATCH specifically.  */

DEBUG_FUNCTION bool
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;
  bool want_fallthru = true;
  edge_iterator ei;
  edge e, fall_edge;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  FOR_EACH_EDGE (e, ei, src->succs)
    gcc_assert (e->aux == NULL);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (cfun, c->label);
	  e = find_edge (src, dst);
	  if (e == NULL)
	    {
	      error ("BB %i is missing an edge", src->index);
	      return true;
	    }
	  e->aux = (void *)e;

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    {
	      want_fallthru = false;
	      break;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (cfun, r->u.allowed.label);
      e = find_edge (src, dst);
      if (e == NULL)
	{
	  error ("BB %i is missing an edge", src->index);
	  return true;
	}
      e->aux = (void *)e;
      break;

    default:
      gcc_unreachable ();
    }

  fall_edge = NULL;
  FOR_EACH_EDGE (e, ei, src->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	{
	  if (fall_edge != NULL)
	    {
	      error ("BB %i too many fallthru edges", src->index);
	      return true;
	    }
	  fall_edge = e;
	}
      else if (e->aux)
	e->aux = NULL;
      else
	{
	  error ("BB %i has incorrect edge", src->index);
	  return true;
	}
    }
  if ((fall_edge != NULL) ^ want_fallthru)
    {
      error ("BB %i has incorrect fallthru edge", src->index);
      return true;
    }

  return false;
}

#if __GNUC__ >= 10
#  pragma GCC diagnostic pop
#endif