/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "gimple.h"
#include "target.h"

/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
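
/* An illustrative sketch (not code from this file) of how the union is
   used: the same slot can hold either a tree or a gimple, e.g.

     treemple key;
     key.t = label_decl;     (store a LABEL_DECL tree)
     key.g = try_stmt;       (or a GIMPLE_TRY statement instead)

   with the active member implied by context.  */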

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
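
/* A note on the hash above: the right shift by 4 discards the low bits
   of the address, which are mostly constant for aligned heap objects
   and so carry little entropy (an observation about typical allocators,
   not a guarantee).  */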


/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}


/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}


/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}

/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
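
/* A sketch (not from an actual pass) of how a caller might interpret
   the result; the helper names are hypothetical:

     int lp_nr = lookup_stmt_eh_lp (stmt);
     if (lp_nr > 0)
       handle_landing_pad (lp_nr);          (hypothetical helper)
     else if (lp_nr < 0)
       handle_must_not_throw (-lp_nr);      (hypothetical helper)
     else
       ;                                    (not in the region table)
*/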

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}


/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
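
/* For example (a sketch): given

     try {                     <- T, a GIMPLE_TRY_FINALLY
       L1: ... goto L2; ...
     } finally { ... }
     L2: ...

   looking up L1 with TARGET == T finds T as the recorded parent and
   returns false (the jump stays inside), while looking up L2 finds no
   record at all and returns true (the jump leaves the node).  */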

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  int is_label;
};

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering a top_p, it is usually expanded into multiple
     statements; the following field stores them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}
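
/* A usage note: with fewer than LARGE_GOTO_QUEUE (20) active entries
   the linear scan above is used; past that threshold the queue is
   indexed once into goto_queue_map so that later lookups take roughly
   constant time.  */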

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with GSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
}

/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple return.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them even if we could tell.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}


#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a GIMPLE_RETURN pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

       int x;
       int foo (void)
       {
         x = 0;
         try {
           return x;
         } finally {
           x++;
         }
       }

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);
  if (!q->repl_stmt)
    q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}
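
/* An implementation note: bitmap_set_bit returns true only when the bit
   was previously clear, so the loop above marks outward through the
   enclosing regions and stops as soon as it reaches a region already
   known to contain a throw.  */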

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        normal_sequence:
          body;
          over:
        eh_sequence:
          landing_pad:
            stuff;
            goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}

/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
                        GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_goto (lab);
      gimple_seq_add_stmt (&eh_seq, x);
    }
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
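
/* A sketch of the resulting shape (illustrative pseudo-GIMPLE; the
   label names are made up):

     <try body>
     finally_tmp = fallthru_index;        (one assignment per edge)
     finally_label:
     <finally body>
     switch (finally_tmp)
       {
         case fallthru_index: goto fallthru_label;
         case 0: goto original_destination_0;
         ...
       }
*/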

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL
    ? gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
    : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
                                              fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
                                         case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
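
/* A worked example with illustrative numbers: for a finally block of 10
   estimated insns and 3 destinations, f_estimate = (10 + 1) * 3 = 33
   and sw_estimate = 10 + 2 * 3 = 16.  When optimizing for size we pick
   the switch (33 >= 16); at -O2 we still copy, since 33 < 100.  */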

/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  for (gsi = gsi_start (gimple_try_cleanup (tp));
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
}
1749
1750 /* Implement a cleanup expression. This is similar to try-finally,
1751 except that we only execute the cleanup block for exception edges. */
1752
1753 static gimple_seq
1754 lower_cleanup (struct leh_state *state, gimple tp)
1755 {
1756 struct leh_state this_state = *state;
1757 eh_region this_region = NULL;
1758 struct leh_tf_state fake_tf;
1759 gimple_seq result;
1760 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1761
1762 if (flag_exceptions && !cleanup_dead)
1763 {
1764 this_region = gen_eh_region_cleanup (state->cur_region);
1765 this_state.cur_region = this_region;
1766 }
1767
1768 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1769
1770 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1771 return gimple_try_eval (tp);
1772
1773 /* Build enough of a try-finally state so that we can reuse
1774 honor_protect_cleanup_actions. */
1775 memset (&fake_tf, 0, sizeof (fake_tf));
1776 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1777 fake_tf.outer = state;
1778 fake_tf.region = this_region;
1779 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1780 fake_tf.may_throw = true;
1781
1782 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1783
1784 if (fake_tf.may_throw)
1785 {
1786 /* In this case honor_protect_cleanup_actions had nothing to do,
1787 and we should process this normally. */
1788 lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
1789 result = frob_into_branch_around (tp, this_region,
1790 fake_tf.fallthru_label);
1791 }
1792 else
1793 {
1794 /* In this case honor_protect_cleanup_actions did nearly all of
1795 the work. All we have left is to append the fallthru_label. */
1796
1797 result = gimple_try_eval (tp);
1798 if (fake_tf.fallthru_label)
1799 {
1800 gimple x = gimple_build_label (fake_tf.fallthru_label);
1801 gimple_seq_add_stmt (&result, x);
1802 }
1803 }
1804 return result;
1805 }
1806
1807 /* Main loop for lowering eh constructs. Also moves gsi to the next
1808 statement. */
1809
1810 static void
1811 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1812 {
1813 gimple_seq replace;
1814 gimple x;
1815 gimple stmt = gsi_stmt (*gsi);
1816
1817 switch (gimple_code (stmt))
1818 {
1819 case GIMPLE_CALL:
1820 {
1821 tree fndecl = gimple_call_fndecl (stmt);
1822 tree rhs, lhs;
1823
1824 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1825 switch (DECL_FUNCTION_CODE (fndecl))
1826 {
1827 case BUILT_IN_EH_POINTER:
1828 /* The front end may have generated a call to
1829 __builtin_eh_pointer (0) within a catch region. Replace
1830 this zero argument with the current catch region number. */
1831 if (state->ehp_region)
1832 {
1833 tree nr = build_int_cst (integer_type_node,
1834 state->ehp_region->index);
1835 gimple_call_set_arg (stmt, 0, nr);
1836 }
1837 else
1838 {
1839 /* The user has dome something silly. Remove it. */
1840 rhs = null_pointer_node;
1841 goto do_replace;
1842 }
1843 break;
1844
1845 case BUILT_IN_EH_FILTER:
1846 /* ??? This should never appear, but since it's a builtin it
1847 is accessible to abuse by users. Just remove it and
1848 replace the use with the arbitrary value zero. */
1849 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1850 do_replace:
1851 lhs = gimple_call_lhs (stmt);
1852 x = gimple_build_assign (lhs, rhs);
1853 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1854 /* FALLTHRU */
1855
1856 case BUILT_IN_EH_COPY_VALUES:
1857 /* Likewise this should not appear. Remove it. */
1858 gsi_remove (gsi, true);
1859 return;
1860
1861 default:
1862 break;
1863 }
1864 }
1865 /* FALLTHRU */
1866
1867 case GIMPLE_ASSIGN:
1868 /* If the stmt can throw, use a new temporary for the assignment
1869 to the LHS. This makes sure the old value of the LHS is
1870 available on the EH edge. Only do so for statements that
1871 potentially fall through (e.g. not for noreturn calls), as the
1872 new assignment might otherwise create fake fallthru regions. */
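/* For example (a sketch; TMP is the temporary created below):
the throwing statement  lhs = call ();  is rewritten as
tmp = call ();  followed by  lhs = tmp;
so the EH edge out of the call still sees the old value of LHS.  */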
1873 if (stmt_could_throw_p (stmt)
1874 && gimple_has_lhs (stmt)
1875 && gimple_stmt_may_fallthru (stmt)
1876 && !tree_could_throw_p (gimple_get_lhs (stmt))
1877 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1878 {
1879 tree lhs = gimple_get_lhs (stmt);
1880 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1881 gimple s = gimple_build_assign (lhs, tmp);
1882 gimple_set_location (s, gimple_location (stmt));
1883 gimple_set_block (s, gimple_block (stmt));
1884 gimple_set_lhs (stmt, tmp);
1885 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1886 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1887 DECL_GIMPLE_REG_P (tmp) = 1;
1888 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1889 }
1890 /* Look for things that can throw exceptions, and record them. */
1891 if (state->cur_region && stmt_could_throw_p (stmt))
1892 {
1893 record_stmt_eh_region (state->cur_region, stmt);
1894 note_eh_region_may_contain_throw (state->cur_region);
1895 }
1896 break;
1897
1898 case GIMPLE_COND:
1899 case GIMPLE_GOTO:
1900 case GIMPLE_RETURN:
1901 maybe_record_in_goto_queue (state, stmt);
1902 break;
1903
1904 case GIMPLE_SWITCH:
1905 verify_norecord_switch_expr (state, stmt);
1906 break;
1907
1908 case GIMPLE_TRY:
1909 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1910 replace = lower_try_finally (state, stmt);
1911 else
1912 {
1913 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
1914 if (!x)
1915 {
1916 replace = gimple_try_eval (stmt);
1917 lower_eh_constructs_1 (state, replace);
1918 }
1919 else
1920 switch (gimple_code (x))
1921 {
1922 case GIMPLE_CATCH:
1923 replace = lower_catch (state, stmt);
1924 break;
1925 case GIMPLE_EH_FILTER:
1926 replace = lower_eh_filter (state, stmt);
1927 break;
1928 case GIMPLE_EH_MUST_NOT_THROW:
1929 replace = lower_eh_must_not_throw (state, stmt);
1930 break;
1931 default:
1932 replace = lower_cleanup (state, stmt);
1933 break;
1934 }
1935 }
1936
1937 /* Remove the old stmt and insert the transformed sequence
1938 instead. */
1939 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
1940 gsi_remove (gsi, true);
1941
1942 /* Return, since we don't want gsi_next (). */
1943 return;
1944
1945 default:
1946 /* A type, a decl, or some kind of statement that we're not
1947 interested in. Don't walk them. */
1948 break;
1949 }
1950
1951 gsi_next (gsi);
1952 }
1953
1954 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
1955
1956 static void
1957 lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
1958 {
1959 gimple_stmt_iterator gsi;
1960 for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
1961 lower_eh_constructs_2 (state, &gsi);
1962 }
1963
1964 static unsigned int
1965 lower_eh_constructs (void)
1966 {
1967 struct leh_state null_state;
1968 gimple_seq bodyp;
1969
1970 bodyp = gimple_body (current_function_decl);
1971 if (bodyp == NULL)
1972 return 0;
1973
1974 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1975 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
1976 memset (&null_state, 0, sizeof (null_state));
1977
1978 collect_finally_tree_1 (bodyp, NULL);
1979 lower_eh_constructs_1 (&null_state, bodyp);
1980
1981 /* We assume there's a return statement, or something, at the end of
1982 the function, and thus plopping the EH sequence afterward won't
1983 change anything. */
1984 gcc_assert (!gimple_seq_may_fallthru (bodyp));
1985 gimple_seq_add_seq (&bodyp, eh_seq);
1986
1987 /* We assume that since BODYP already existed, adding EH_SEQ to it
1988 didn't change its value, and we don't have to re-set the function. */
1989 gcc_assert (bodyp == gimple_body (current_function_decl));
1990
1991 htab_delete (finally_tree);
1992 BITMAP_FREE (eh_region_may_contain_throw_map);
1993 eh_seq = NULL;
1994
1995 /* If this function needs a language specific EH personality routine
1996 and the frontend didn't already set one do so now. */
1997 if (function_needs_eh_personality (cfun) == eh_personality_lang
1998 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
1999 DECL_FUNCTION_PERSONALITY (current_function_decl)
2000 = lang_hooks.eh_personality ();
2001
2002 return 0;
2003 }
2004
2005 struct gimple_opt_pass pass_lower_eh =
2006 {
2007 {
2008 GIMPLE_PASS,
2009 "eh", /* name */
2010 NULL, /* gate */
2011 lower_eh_constructs, /* execute */
2012 NULL, /* sub */
2013 NULL, /* next */
2014 0, /* static_pass_number */
2015 TV_TREE_EH, /* tv_id */
2016 PROP_gimple_lcf, /* properties_required */
2017 PROP_gimple_leh, /* properties_provided */
2018 0, /* properties_destroyed */
2019 0, /* todo_flags_start */
2020 0 /* todo_flags_finish */
2021 }
2022 };
2023 \f
2024 /* Create the multiple edges from an EH_DISPATCH statement to all of
2025 the possible handlers for its EH region. Return true if the
2026 dispatch has a fallthru edge; false if it does not. */
2027
2028 bool
2029 make_eh_dispatch_edges (gimple stmt)
2030 {
2031 eh_region r;
2032 eh_catch c;
2033 basic_block src, dst;
2034
2035 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2036 src = gimple_bb (stmt);
2037
2038 switch (r->type)
2039 {
2040 case ERT_TRY:
2041 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2042 {
2043 dst = label_to_block (c->label);
2044 make_edge (src, dst, 0);
2045
2046 /* A catch-all handler doesn't have a fallthru. */
2047 if (c->type_list == NULL)
2048 return false;
2049 }
2050 break;
2051
2052 case ERT_ALLOWED_EXCEPTIONS:
2053 dst = label_to_block (r->u.allowed.label);
2054 make_edge (src, dst, 0);
2055 break;
2056
2057 default:
2058 gcc_unreachable ();
2059 }
2060
2061 return true;
2062 }
2063
2064 /* Create the single EH edge from STMT to its nearest landing pad,
2065 if there is such a landing pad within the current function. */
2066
2067 void
2068 make_eh_edges (gimple stmt)
2069 {
2070 basic_block src, dst;
2071 eh_landing_pad lp;
2072 int lp_nr;
2073
2074 lp_nr = lookup_stmt_eh_lp (stmt);
2075 if (lp_nr <= 0)
2076 return;
2077
2078 lp = get_eh_landing_pad_from_number (lp_nr);
2079 gcc_assert (lp != NULL);
2080
2081 src = gimple_bb (stmt);
2082 dst = label_to_block (lp->post_landing_pad);
2083 make_edge (src, dst, EDGE_EH);
2084 }
2085
2086 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2087 do not actually perform the final edge redirection.
2088
2089 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2090 we intend to change the destination EH region as well; this means
2091 EH_LANDING_PAD_NR must already be set on the destination block label.
2092 If false, we're being called from generic cfg manipulation code and we
2093 should preserve our place within the region tree. */
2094
2095 static void
2096 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2097 {
2098 eh_landing_pad old_lp, new_lp;
2099 basic_block old_bb;
2100 gimple throw_stmt;
2101 int old_lp_nr, new_lp_nr;
2102 tree old_label, new_label;
2103 edge_iterator ei;
2104 edge e;
2105
2106 old_bb = edge_in->dest;
2107 old_label = gimple_block_label (old_bb);
2108 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2109 gcc_assert (old_lp_nr > 0);
2110 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2111
2112 throw_stmt = last_stmt (edge_in->src);
2113 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2114
2115 new_label = gimple_block_label (new_bb);
2116
2117 /* Look for an existing region that might be using NEW_BB already. */
2118 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2119 if (new_lp_nr)
2120 {
2121 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2122 gcc_assert (new_lp);
2123
2124 /* Unless CHANGE_REGION is true, the new and old landing pad
2125 had better be associated with the same EH region. */
2126 gcc_assert (change_region || new_lp->region == old_lp->region);
2127 }
2128 else
2129 {
2130 new_lp = NULL;
2131 gcc_assert (!change_region);
2132 }
2133
2134 /* Notice when we redirect the last EH edge away from OLD_BB. */
2135 FOR_EACH_EDGE (e, ei, old_bb->preds)
2136 if (e != edge_in && (e->flags & EDGE_EH))
2137 break;
2138
2139 if (new_lp)
2140 {
2141 /* NEW_LP already exists. If there are still edges into OLD_LP,
2142 there's nothing to do with the EH tree. If there are no more
2143 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2144 If CHANGE_REGION is true, then our caller is expecting to remove
2145 the landing pad. */
2146 if (e == NULL && !change_region)
2147 remove_eh_landing_pad (old_lp);
2148 }
2149 else
2150 {
2151 /* No correct landing pad exists. If there are no more edges
2152 into OLD_LP, then we can simply re-use the existing landing pad.
2153 Otherwise, we have to create a new landing pad. */
2154 if (e == NULL)
2155 {
2156 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2157 new_lp = old_lp;
2158 }
2159 else
2160 new_lp = gen_eh_landing_pad (old_lp->region);
2161 new_lp->post_landing_pad = new_label;
2162 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2163 }
2164
2165 /* Maybe move the throwing statement to the new region. */
2166 if (old_lp != new_lp)
2167 {
2168 remove_stmt_from_eh_lp (throw_stmt);
2169 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2170 }
2171 }
2172
2173 /* Redirect EH edge EDGE_IN to NEW_BB. */
2174
2175 edge
2176 redirect_eh_edge (edge edge_in, basic_block new_bb)
2177 {
2178 redirect_eh_edge_1 (edge_in, new_bb, false);
2179 return ssa_redirect_edge (edge_in, new_bb);
2180 }
2181
2182 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2183 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2184 The actual edge update will happen in the caller. */
2185
2186 void
2187 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2188 {
2189 tree new_lab = gimple_block_label (new_bb);
2190 bool any_changed = false;
2191 basic_block old_bb;
2192 eh_region r;
2193 eh_catch c;
2194
2195 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2196 switch (r->type)
2197 {
2198 case ERT_TRY:
2199 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2200 {
2201 old_bb = label_to_block (c->label);
2202 if (old_bb == e->dest)
2203 {
2204 c->label = new_lab;
2205 any_changed = true;
2206 }
2207 }
2208 break;
2209
2210 case ERT_ALLOWED_EXCEPTIONS:
2211 old_bb = label_to_block (r->u.allowed.label);
2212 gcc_assert (old_bb == e->dest);
2213 r->u.allowed.label = new_lab;
2214 any_changed = true;
2215 break;
2216
2217 default:
2218 gcc_unreachable ();
2219 }
2220
2221 gcc_assert (any_changed);
2222 }
2223 \f
2224 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2225
2226 bool
2227 operation_could_trap_helper_p (enum tree_code op,
2228 bool fp_operation,
2229 bool honor_trapv,
2230 bool honor_nans,
2231 bool honor_snans,
2232 tree divisor,
2233 bool *handled)
2234 {
2235 *handled = true;
2236 switch (op)
2237 {
2238 case TRUNC_DIV_EXPR:
2239 case CEIL_DIV_EXPR:
2240 case FLOOR_DIV_EXPR:
2241 case ROUND_DIV_EXPR:
2242 case EXACT_DIV_EXPR:
2243 case CEIL_MOD_EXPR:
2244 case FLOOR_MOD_EXPR:
2245 case ROUND_MOD_EXPR:
2246 case TRUNC_MOD_EXPR:
2247 case RDIV_EXPR:
2248 if (honor_snans || honor_trapv)
2249 return true;
2250 if (fp_operation)
2251 return flag_trapping_math;
2252 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2253 return true;
2254 return false;
2255
2256 case LT_EXPR:
2257 case LE_EXPR:
2258 case GT_EXPR:
2259 case GE_EXPR:
2260 case LTGT_EXPR:
2261 /* Some floating point comparisons may trap. */
2262 return honor_nans;
2263
2264 case EQ_EXPR:
2265 case NE_EXPR:
2266 case UNORDERED_EXPR:
2267 case ORDERED_EXPR:
2268 case UNLT_EXPR:
2269 case UNLE_EXPR:
2270 case UNGT_EXPR:
2271 case UNGE_EXPR:
2272 case UNEQ_EXPR:
2273 return honor_snans;
2274
2275 case CONVERT_EXPR:
2276 case FIX_TRUNC_EXPR:
2277 /* Conversion of floating point might trap. */
2278 return honor_nans;
2279
2280 case NEGATE_EXPR:
2281 case ABS_EXPR:
2282 case CONJ_EXPR:
2283 /* These operations don't trap with floating point. */
2284 if (honor_trapv)
2285 return true;
2286 return false;
2287
2288 case PLUS_EXPR:
2289 case MINUS_EXPR:
2290 case MULT_EXPR:
2291 /* Any floating arithmetic may trap. */
2292 if (fp_operation && flag_trapping_math)
2293 return true;
2294 if (honor_trapv)
2295 return true;
2296 return false;
2297
2298 case COMPLEX_EXPR:
2299 case CONSTRUCTOR:
2300 /* Constructing an object cannot trap. */
2301 return false;
2302
2303 default:
2304 /* Any floating arithmetic may trap. */
2305 if (fp_operation && flag_trapping_math)
2306 return true;
2307
2308 *handled = false;
2309 return false;
2310 }
2311 }
2312
2313 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2314 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2315 type operands whose overflow traps. If OP is a division operator, DIVISOR
2316 contains the value of the divisor. */
2317
2318 bool
2319 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2320 tree divisor)
2321 {
2322 bool honor_nans = (fp_operation && flag_trapping_math
2323 && !flag_finite_math_only);
2324 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2325 bool handled;
2326
2327 if (TREE_CODE_CLASS (op) != tcc_comparison
2328 && TREE_CODE_CLASS (op) != tcc_unary
2329 && TREE_CODE_CLASS (op) != tcc_binary)
2330 return false;
2331
2332 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2333 honor_nans, honor_snans, divisor,
2334 &handled);
2335 }
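
/* For instance (a sketch), asking about a signed integer division

operation_could_trap_p (TRUNC_DIV_EXPR, false, false, d)

yields true unless D is a constant known to be nonzero, since integer
division by zero traps on most targets; passing true for HONOR_TRAPV
(e.g. under -ftrapv) makes the query return true unconditionally.  */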
2336
2337 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2338 location or floating point arithmetic. Cf. the RTL version, may_trap_p.
2339 This routine expects only GIMPLE lhs or rhs input. */
2340
2341 bool
2342 tree_could_trap_p (tree expr)
2343 {
2344 enum tree_code code;
2345 bool fp_operation = false;
2346 bool honor_trapv = false;
2347 tree t, base, div = NULL_TREE;
2348
2349 if (!expr)
2350 return false;
2351
2352 code = TREE_CODE (expr);
2353 t = TREE_TYPE (expr);
2354
2355 if (t)
2356 {
2357 if (COMPARISON_CLASS_P (expr))
2358 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2359 else
2360 fp_operation = FLOAT_TYPE_P (t);
2361 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2362 }
2363
2364 if (TREE_CODE_CLASS (code) == tcc_binary)
2365 div = TREE_OPERAND (expr, 1);
2366 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2367 return true;
2368
2369 restart:
2370 switch (code)
2371 {
2372 case TARGET_MEM_REF:
2373 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2374 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2375 return false;
2376 return !TREE_THIS_NOTRAP (expr);
2377
2378 case COMPONENT_REF:
2379 case REALPART_EXPR:
2380 case IMAGPART_EXPR:
2381 case BIT_FIELD_REF:
2382 case VIEW_CONVERT_EXPR:
2383 case WITH_SIZE_EXPR:
2384 expr = TREE_OPERAND (expr, 0);
2385 code = TREE_CODE (expr);
2386 goto restart;
2387
2388 case ARRAY_RANGE_REF:
2389 base = TREE_OPERAND (expr, 0);
2390 if (tree_could_trap_p (base))
2391 return true;
2392 if (TREE_THIS_NOTRAP (expr))
2393 return false;
2394 return !range_in_array_bounds_p (expr);
2395
2396 case ARRAY_REF:
2397 base = TREE_OPERAND (expr, 0);
2398 if (tree_could_trap_p (base))
2399 return true;
2400 if (TREE_THIS_NOTRAP (expr))
2401 return false;
2402 return !in_array_bounds_p (expr);
2403
2404 case MEM_REF:
2405 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2406 return false;
2407 /* Fallthru. */
2408 case INDIRECT_REF:
2409 return !TREE_THIS_NOTRAP (expr);
2410
2411 case ASM_EXPR:
2412 return TREE_THIS_VOLATILE (expr);
2413
2414 case CALL_EXPR:
2415 t = get_callee_fndecl (expr);
2416 /* Assume that calls to weak functions may trap. */
2417 if (!t || !DECL_P (t))
2418 return true;
2419 if (DECL_WEAK (t))
2420 return tree_could_trap_p (t);
2421 return false;
2422
2423 case FUNCTION_DECL:
2424 /* Assume that accesses to weak functions may trap, unless we know
2425 they are certainly defined in the current TU or in some other
2426 LTO partition. */
2427 if (DECL_WEAK (expr))
2428 {
2429 struct cgraph_node *node;
2430 if (!DECL_EXTERNAL (expr))
2431 return false;
2432 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2433 if (node && node->in_other_partition)
2434 return false;
2435 return true;
2436 }
2437 return false;
2438
2439 case VAR_DECL:
2440 /* Assume that accesses to weak vars may trap, unless we know
2441 they are certainly defined in the current TU or in some other
2442 LTO partition. */
2443 if (DECL_WEAK (expr))
2444 {
2445 struct varpool_node *node;
2446 if (!DECL_EXTERNAL (expr))
2447 return false;
2448 node = varpool_variable_node (varpool_get_node (expr), NULL);
2449 if (node && node->in_other_partition)
2450 return false;
2451 return true;
2452 }
2453 return false;
2454
2455 default:
2456 return false;
2457 }
2458 }
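
/* For instance (a sketch), an ARRAY_REF A[I] is reported as trapping
unless I is provably within A's bounds or TREE_THIS_NOTRAP is set on
the reference, and an INDIRECT_REF *P is trapping unless
TREE_THIS_NOTRAP holds for the dereference.  */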
2459
2460
2461 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2462 an assignment or a conditional) may throw. */
2463
2464 static bool
2465 stmt_could_throw_1_p (gimple stmt)
2466 {
2467 enum tree_code code = gimple_expr_code (stmt);
2468 bool honor_nans = false;
2469 bool honor_snans = false;
2470 bool fp_operation = false;
2471 bool honor_trapv = false;
2472 tree t;
2473 size_t i;
2474 bool handled, ret;
2475
2476 if (TREE_CODE_CLASS (code) == tcc_comparison
2477 || TREE_CODE_CLASS (code) == tcc_unary
2478 || TREE_CODE_CLASS (code) == tcc_binary)
2479 {
2480 if (is_gimple_assign (stmt)
2481 && TREE_CODE_CLASS (code) == tcc_comparison)
2482 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2483 else if (gimple_code (stmt) == GIMPLE_COND)
2484 t = TREE_TYPE (gimple_cond_lhs (stmt));
2485 else
2486 t = gimple_expr_type (stmt);
2487 fp_operation = FLOAT_TYPE_P (t);
2488 if (fp_operation)
2489 {
2490 honor_nans = flag_trapping_math && !flag_finite_math_only;
2491 honor_snans = flag_signaling_nans != 0;
2492 }
2493 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2494 honor_trapv = true;
2495 }
2496
2497 /* Check if the main expression may trap. */
2498 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2499 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2500 honor_nans, honor_snans, t,
2501 &handled);
2502 if (handled)
2503 return ret;
2504
2505 /* If the expression does not trap, see if any of the individual operands may
2506 trap. */
2507 for (i = 0; i < gimple_num_ops (stmt); i++)
2508 if (tree_could_trap_p (gimple_op (stmt, i)))
2509 return true;
2510
2511 return false;
2512 }
2513
2514
2515 /* Return true if statement STMT could throw an exception. */
2516
2517 bool
2518 stmt_could_throw_p (gimple stmt)
2519 {
2520 if (!flag_exceptions)
2521 return false;
2522
2523 /* The only statements that can throw an exception are assignments,
2524 conditionals, calls, resx, and asms. */
2525 switch (gimple_code (stmt))
2526 {
2527 case GIMPLE_RESX:
2528 return true;
2529
2530 case GIMPLE_CALL:
2531 return !gimple_call_nothrow_p (stmt);
2532
2533 case GIMPLE_ASSIGN:
2534 case GIMPLE_COND:
2535 if (!cfun->can_throw_non_call_exceptions)
2536 return false;
2537 return stmt_could_throw_1_p (stmt);
2538
2539 case GIMPLE_ASM:
2540 if (!cfun->can_throw_non_call_exceptions)
2541 return false;
2542 return gimple_asm_volatile_p (stmt);
2543
2544 default:
2545 return false;
2546 }
2547 }
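
/* E.g. (a sketch), with -fnon-call-exceptions the load  x = *p;  is a
GIMPLE_ASSIGN whose RHS may trap, so stmt_could_throw_p returns true
for it; without that flag only calls and resx statements are
considered potentially throwing.  */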
2548
2549
2550 /* Return true if expression T could throw an exception. */
2551
2552 bool
2553 tree_could_throw_p (tree t)
2554 {
2555 if (!flag_exceptions)
2556 return false;
2557 if (TREE_CODE (t) == MODIFY_EXPR)
2558 {
2559 if (cfun->can_throw_non_call_exceptions
2560 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2561 return true;
2562 t = TREE_OPERAND (t, 1);
2563 }
2564
2565 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2566 t = TREE_OPERAND (t, 0);
2567 if (TREE_CODE (t) == CALL_EXPR)
2568 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2569 if (cfun->can_throw_non_call_exceptions)
2570 return tree_could_trap_p (t);
2571 return false;
2572 }
2573
2574 /* Return true if STMT can throw an exception that is not caught within
2575 the current function (CFUN). */
2576
2577 bool
2578 stmt_can_throw_external (gimple stmt)
2579 {
2580 int lp_nr;
2581
2582 if (!stmt_could_throw_p (stmt))
2583 return false;
2584
2585 lp_nr = lookup_stmt_eh_lp (stmt);
2586 return lp_nr == 0;
2587 }
2588
2589 /* Return true if STMT can throw an exception that is caught within
2590 the current function (CFUN). */
2591
2592 bool
2593 stmt_can_throw_internal (gimple stmt)
2594 {
2595 int lp_nr;
2596
2597 if (!stmt_could_throw_p (stmt))
2598 return false;
2599
2600 lp_nr = lookup_stmt_eh_lp (stmt);
2601 return lp_nr > 0;
2602 }
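
/* The landing-pad number returned by lookup_stmt_eh_lp thus encodes
three cases: 0 means an exception thrown by the statement escapes
CFUN, a positive value names a real landing pad within it, and a
negative value is the negated index of a MUST_NOT_THROW region.  */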
2603
2604 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2605 remove any entry it might have from the EH table. Return true if
2606 any change was made. */
2607
2608 bool
2609 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2610 {
2611 if (stmt_could_throw_p (stmt))
2612 return false;
2613 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2614 }
2615
2616 /* Likewise, but always use the current function. */
2617
2618 bool
2619 maybe_clean_eh_stmt (gimple stmt)
2620 {
2621 return maybe_clean_eh_stmt_fn (cfun, stmt);
2622 }
2623
2624 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2625 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2626 in the table if it should be in there. Return TRUE if a replacement was
2627 done that may require an EH edge purge. */
2628
2629 bool
2630 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2631 {
2632 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2633
2634 if (lp_nr != 0)
2635 {
2636 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2637
2638 if (new_stmt == old_stmt && new_stmt_could_throw)
2639 return false;
2640
2641 remove_stmt_from_eh_lp (old_stmt);
2642 if (new_stmt_could_throw)
2643 {
2644 add_stmt_to_eh_lp (new_stmt, lp_nr);
2645 return false;
2646 }
2647 else
2648 return true;
2649 }
2650
2651 return false;
2652 }
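
/* Typical use (a sketch): a pass that folds OLD_STMT into NEW_STMT in
block BB would follow up with

if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
gimple_purge_dead_eh_edges (bb);

so that EH edges made dead by the replacement get removed.  */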
2653
2654 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2655 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2656 operand is the return value of duplicate_eh_regions. */
2657
2658 bool
2659 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2660 struct function *old_fun, gimple old_stmt,
2661 struct pointer_map_t *map, int default_lp_nr)
2662 {
2663 int old_lp_nr, new_lp_nr;
2664 void **slot;
2665
2666 if (!stmt_could_throw_p (new_stmt))
2667 return false;
2668
2669 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2670 if (old_lp_nr == 0)
2671 {
2672 if (default_lp_nr == 0)
2673 return false;
2674 new_lp_nr = default_lp_nr;
2675 }
2676 else if (old_lp_nr > 0)
2677 {
2678 eh_landing_pad old_lp, new_lp;
2679
2680 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2681 slot = pointer_map_contains (map, old_lp);
2682 new_lp = (eh_landing_pad) *slot;
2683 new_lp_nr = new_lp->index;
2684 }
2685 else
2686 {
2687 eh_region old_r, new_r;
2688
2689 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2690 slot = pointer_map_contains (map, old_r);
2691 new_r = (eh_region) *slot;
2692 new_lp_nr = -new_r->index;
2693 }
2694
2695 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2696 return true;
2697 }
2698
2699 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2700 and thus no remapping is required. */
2701
2702 bool
2703 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2704 {
2705 int lp_nr;
2706
2707 if (!stmt_could_throw_p (new_stmt))
2708 return false;
2709
2710 lp_nr = lookup_stmt_eh_lp (old_stmt);
2711 if (lp_nr == 0)
2712 return false;
2713
2714 add_stmt_to_eh_lp (new_stmt, lp_nr);
2715 return true;
2716 }
2717 \f
2718 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2719 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2720 this only handles handlers consisting of a single call, as that's the
2721 important case for C++: a destructor call for a particular object showing
2722 up in multiple handlers. */
2723
2724 static bool
2725 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2726 {
2727 gimple_stmt_iterator gsi;
2728 gimple ones, twos;
2729 unsigned int ai;
2730
2731 gsi = gsi_start (oneh);
2732 if (!gsi_one_before_end_p (gsi))
2733 return false;
2734 ones = gsi_stmt (gsi);
2735
2736 gsi = gsi_start (twoh);
2737 if (!gsi_one_before_end_p (gsi))
2738 return false;
2739 twos = gsi_stmt (gsi);
2740
2741 if (!is_gimple_call (ones)
2742 || !is_gimple_call (twos)
2743 || gimple_call_lhs (ones)
2744 || gimple_call_lhs (twos)
2745 || gimple_call_chain (ones)
2746 || gimple_call_chain (twos)
2747 || !gimple_call_same_target_p (ones, twos)
2748 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2749 return false;
2750
2751 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2752 if (!operand_equal_p (gimple_call_arg (ones, ai),
2753 gimple_call_arg (twos, ai), 0))
2754 return false;
2755
2756 return true;
2757 }
2758
2759 /* Optimize
2760 try { A() } finally { try { ~B() } catch { ~A() } }
2761 try { ... } finally { ~A() }
2762 into
2763 try { A() } catch { ~B() }
2764 try { ~B() ... } finally { ~A() }
2765
2766 This occurs frequently in C++, where A is a local variable and B is a
2767 temporary used in the initializer for A. */
2768
2769 static void
2770 optimize_double_finally (gimple one, gimple two)
2771 {
2772 gimple oneh;
2773 gimple_stmt_iterator gsi;
2774
2775 gsi = gsi_start (gimple_try_cleanup (one));
2776 if (!gsi_one_before_end_p (gsi))
2777 return;
2778
2779 oneh = gsi_stmt (gsi);
2780 if (gimple_code (oneh) != GIMPLE_TRY
2781 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2782 return;
2783
2784 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2785 {
2786 gimple_seq seq = gimple_try_eval (oneh);
2787
2788 gimple_try_set_cleanup (one, seq);
2789 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2790 seq = copy_gimple_seq_and_replace_locals (seq);
2791 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2792 gimple_try_set_eval (two, seq);
2793 }
2794 }
2795
2796 /* Perform EH refactoring optimizations that are simpler to do when code
2797 flow has been lowered but EH structures haven't. */
2798
2799 static void
2800 refactor_eh_r (gimple_seq seq)
2801 {
2802 gimple_stmt_iterator gsi;
2803 gimple one, two;
2804
2805 one = NULL;
2806 two = NULL;
2807 gsi = gsi_start (seq);
2808 while (1)
2809 {
2810 one = two;
2811 if (gsi_end_p (gsi))
2812 two = NULL;
2813 else
2814 two = gsi_stmt (gsi);
2815 if (one
2816 && two
2817 && gimple_code (one) == GIMPLE_TRY
2818 && gimple_code (two) == GIMPLE_TRY
2819 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2820 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2821 optimize_double_finally (one, two);
2822 if (one)
2823 switch (gimple_code (one))
2824 {
2825 case GIMPLE_TRY:
2826 refactor_eh_r (gimple_try_eval (one));
2827 refactor_eh_r (gimple_try_cleanup (one));
2828 break;
2829 case GIMPLE_CATCH:
2830 refactor_eh_r (gimple_catch_handler (one));
2831 break;
2832 case GIMPLE_EH_FILTER:
2833 refactor_eh_r (gimple_eh_filter_failure (one));
2834 break;
2835 default:
2836 break;
2837 }
2838 if (two)
2839 gsi_next (&gsi);
2840 else
2841 break;
2842 }
2843 }
2844
2845 static unsigned
2846 refactor_eh (void)
2847 {
2848 refactor_eh_r (gimple_body (current_function_decl));
2849 return 0;
2850 }
2851
2852 static bool
2853 gate_refactor_eh (void)
2854 {
2855 return flag_exceptions != 0;
2856 }
2857
2858 struct gimple_opt_pass pass_refactor_eh =
2859 {
2860 {
2861 GIMPLE_PASS,
2862 "ehopt", /* name */
2863 gate_refactor_eh, /* gate */
2864 refactor_eh, /* execute */
2865 NULL, /* sub */
2866 NULL, /* next */
2867 0, /* static_pass_number */
2868 TV_TREE_EH, /* tv_id */
2869 PROP_gimple_lcf, /* properties_required */
2870 0, /* properties_provided */
2871 0, /* properties_destroyed */
2872 0, /* todo_flags_start */
2873 0 /* todo_flags_finish */
2874 }
2875 };
2876 \f
2877 /* At the end of gimple optimization, we can lower RESX. */
2878
2879 static bool
2880 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2881 {
2882 int lp_nr;
2883 eh_region src_r, dst_r;
2884 gimple_stmt_iterator gsi;
2885 gimple x;
2886 tree fn, src_nr;
2887 bool ret = false;
2888
2889 lp_nr = lookup_stmt_eh_lp (stmt);
2890 if (lp_nr != 0)
2891 dst_r = get_eh_region_from_lp_number (lp_nr);
2892 else
2893 dst_r = NULL;
2894
2895 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2896 gsi = gsi_last_bb (bb);
2897
2898 if (src_r == NULL)
2899 {
2900 /* We can wind up with no source region when pass_cleanup_eh shows
2901 that there are no entries into an eh region and deletes it, but
2902 then the block that contains the resx isn't removed. This can
2903 happen without optimization when the switch statement created by
2904 lower_try_finally_switch isn't simplified to remove the eh case.
2905
2906 Resolve this by expanding the resx node to an abort. */
2907
2908 fn = builtin_decl_implicit (BUILT_IN_TRAP);
2909 x = gimple_build_call (fn, 0);
2910 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2911
2912 while (EDGE_COUNT (bb->succs) > 0)
2913 remove_edge (EDGE_SUCC (bb, 0));
2914 }
2915 else if (dst_r)
2916 {
2917 /* When we have a destination region, we resolve this by copying
2918 the exc_ptr and filter values into place, and changing the edge
2919 to immediately after the landing pad. */
2920 edge e;
2921
2922 if (lp_nr < 0)
2923 {
2924 basic_block new_bb;
2925 void **slot;
2926 tree lab;
2927
2928 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
2929 the failure decl into a new block, if needed. */
2930 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
2931
2932 slot = pointer_map_contains (mnt_map, dst_r);
2933 if (slot == NULL)
2934 {
2935 gimple_stmt_iterator gsi2;
2936
2937 new_bb = create_empty_bb (bb);
2938 lab = gimple_block_label (new_bb);
2939 gsi2 = gsi_start_bb (new_bb);
2940
2941 fn = dst_r->u.must_not_throw.failure_decl;
2942 x = gimple_build_call (fn, 0);
2943 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
2944 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
2945
2946 slot = pointer_map_insert (mnt_map, dst_r);
2947 *slot = lab;
2948 }
2949 else
2950 {
2951 lab = (tree) *slot;
2952 new_bb = label_to_block (lab);
2953 }
2954
2955 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2956 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
2957 e->count = bb->count;
2958 e->probability = REG_BR_PROB_BASE;
2959 }
2960 else
2961 {
2962 edge_iterator ei;
2963 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
2964
2965 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
2966 src_nr = build_int_cst (integer_type_node, src_r->index);
2967 x = gimple_build_call (fn, 2, dst_nr, src_nr);
2968 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2969
2970 /* Update the flags for the outgoing edge. */
2971 e = single_succ_edge (bb);
2972 gcc_assert (e->flags & EDGE_EH);
2973 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
2974
2975 /* If there are no more EH users of the landing pad, delete it. */
2976 FOR_EACH_EDGE (e, ei, e->dest->preds)
2977 if (e->flags & EDGE_EH)
2978 break;
2979 if (e == NULL)
2980 {
2981 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2982 remove_eh_landing_pad (lp);
2983 }
2984 }
2985
2986 ret = true;
2987 }
2988 else
2989 {
2990 tree var;
2991
2992 /* When we don't have a destination region, this exception escapes
2993 up the call chain. We resolve this by generating a call to the
2994 _Unwind_Resume library function. */
2995
2996 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
2997 with no arguments for C++ and Java. Check for that. */
2998 if (src_r->use_cxa_end_cleanup)
2999 {
3000 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3001 x = gimple_build_call (fn, 0);
3002 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3003 }
3004 else
3005 {
3006 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3007 src_nr = build_int_cst (integer_type_node, src_r->index);
3008 x = gimple_build_call (fn, 1, src_nr);
3009 var = create_tmp_var (ptr_type_node, NULL);
3010 var = make_ssa_name (var, x);
3011 gimple_call_set_lhs (x, var);
3012 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3013
3014 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3015 x = gimple_build_call (fn, 1, var);
3016 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3017 }
3018
3019 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3020 }
3021
3022 gsi_remove (&gsi, true);
3023
3024 return ret;
3025 }
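
/* In the escaping case above, the generated sequence is roughly
(a sketch):

tmp_N = __builtin_eh_pointer (SRC_REGION_NR);
_Unwind_Resume (tmp_N);

while a resx with a destination region in the same function instead
becomes a __builtin_eh_copy_values call followed by a fallthru edge
to the block just after the destination landing pad.  */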
3026
3027 static unsigned
3028 execute_lower_resx (void)
3029 {
3030 basic_block bb;
3031 struct pointer_map_t *mnt_map;
3032 bool dominance_invalidated = false;
3033 bool any_rewritten = false;
3034
3035 mnt_map = pointer_map_create ();
3036
3037 FOR_EACH_BB (bb)
3038 {
3039 gimple last = last_stmt (bb);
3040 if (last && is_gimple_resx (last))
3041 {
3042 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3043 any_rewritten = true;
3044 }
3045 }
3046
3047 pointer_map_destroy (mnt_map);
3048
3049 if (dominance_invalidated)
3050 {
3051 free_dominance_info (CDI_DOMINATORS);
3052 free_dominance_info (CDI_POST_DOMINATORS);
3053 }
3054
3055 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3056 }
3057
3058 static bool
3059 gate_lower_resx (void)
3060 {
3061 return flag_exceptions != 0;
3062 }
3063
3064 struct gimple_opt_pass pass_lower_resx =
3065 {
3066 {
3067 GIMPLE_PASS,
3068 "resx", /* name */
3069 gate_lower_resx, /* gate */
3070 execute_lower_resx, /* execute */
3071 NULL, /* sub */
3072 NULL, /* next */
3073 0, /* static_pass_number */
3074 TV_TREE_EH, /* tv_id */
3075 PROP_gimple_lcf, /* properties_required */
3076 0, /* properties_provided */
3077 0, /* properties_destroyed */
3078 0, /* todo_flags_start */
3079 TODO_verify_flow /* todo_flags_finish */
3080 }
3081 };
3082
3083
3084 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3085 we have found some duplicate labels and removed some edges. */
3086
3087 static bool
3088 lower_eh_dispatch (basic_block src, gimple stmt)
3089 {
3090 gimple_stmt_iterator gsi;
3091 int region_nr;
3092 eh_region r;
3093 tree filter, fn;
3094 gimple x;
3095 bool redirected = false;
3096
3097 region_nr = gimple_eh_dispatch_region (stmt);
3098 r = get_eh_region_from_number (region_nr);
3099
3100 gsi = gsi_last_bb (src);
3101
3102 switch (r->type)
3103 {
3104 case ERT_TRY:
3105 {
3106 VEC (tree, heap) *labels = NULL;
3107 tree default_label = NULL;
3108 eh_catch c;
3109 edge_iterator ei;
3110 edge e;
3111 struct pointer_set_t *seen_values = pointer_set_create ();
3112
3113 /* Collect the labels for a switch. Zero the post_landing_pad
3114 field because we'll no longer have anything keeping these labels
3115 in existence and the optimizer will be free to merge these
3116 blocks at will. */
3117 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3118 {
3119 tree tp_node, flt_node, lab = c->label;
3120 bool have_label = false;
3121
3122 c->label = NULL;
3123 tp_node = c->type_list;
3124 flt_node = c->filter_list;
3125
3126 if (tp_node == NULL)
3127 {
3128 default_label = lab;
3129 break;
3130 }
3131 do
3132 {
3133 /* Filter out duplicate labels that arise when this handler
3134 is shadowed by an earlier one. When no labels are
3135 attached to the handler anymore, we remove
3136 the corresponding edge and then we delete unreachable
3137 blocks at the end of this pass. */
3138 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3139 {
3140 tree t = build_case_label (TREE_VALUE (flt_node),
3141 NULL, lab);
3142 VEC_safe_push (tree, heap, labels, t);
3143 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3144 have_label = true;
3145 }
3146
3147 tp_node = TREE_CHAIN (tp_node);
3148 flt_node = TREE_CHAIN (flt_node);
3149 }
3150 while (tp_node);
3151 if (! have_label)
3152 {
3153 remove_edge (find_edge (src, label_to_block (lab)));
3154 redirected = true;
3155 }
3156 }
3157
3158 /* Clean up the edge flags. */
3159 FOR_EACH_EDGE (e, ei, src->succs)
3160 {
3161 if (e->flags & EDGE_FALLTHRU)
3162 {
3163 /* If there was no catch-all, use the fallthru edge. */
3164 if (default_label == NULL)
3165 default_label = gimple_block_label (e->dest);
3166 e->flags &= ~EDGE_FALLTHRU;
3167 }
3168 }
3169 gcc_assert (default_label != NULL);
3170
3171 /* Don't generate a switch if there's only a default case.
3172 This is common in the form of try { A; } catch (...) { B; }. */
3173 if (labels == NULL)
3174 {
3175 e = single_succ_edge (src);
3176 e->flags |= EDGE_FALLTHRU;
3177 }
3178 else
3179 {
3180 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3181 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3182 region_nr));
3183 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3184 filter = make_ssa_name (filter, x);
3185 gimple_call_set_lhs (x, filter);
3186 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3187
3188 /* Turn the default label into a default case. */
3189 default_label = build_case_label (NULL, NULL, default_label);
3190 sort_case_labels (labels);
3191
3192 x = gimple_build_switch_vec (filter, default_label, labels);
3193 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3194
3195 VEC_free (tree, heap, labels);
3196 }
3197 pointer_set_destroy (seen_values);
3198 }
3199 break;
3200
3201 case ERT_ALLOWED_EXCEPTIONS:
3202 {
3203 edge b_e = BRANCH_EDGE (src);
3204 edge f_e = FALLTHRU_EDGE (src);
3205
3206 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3207 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3208 region_nr));
3209 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3210 filter = make_ssa_name (filter, x);
3211 gimple_call_set_lhs (x, filter);
3212 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3213
3214 r->u.allowed.label = NULL;
3215 x = gimple_build_cond (EQ_EXPR, filter,
3216 build_int_cst (TREE_TYPE (filter),
3217 r->u.allowed.filter),
3218 NULL_TREE, NULL_TREE);
3219 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3220
3221 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3222 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3223 }
3224 break;
3225
3226 default:
3227 gcc_unreachable ();
3228 }
3229
3230 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3231 gsi_remove (&gsi, true);
3232 return redirected;
3233 }
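
/* For a try region with two typed catches, the dispatch is lowered to
roughly (a sketch; labels and filter values are illustrative):

filter_N = __builtin_eh_filter (REGION_NR);
switch (filter_N)
{
case 1: goto <L_catch_A>;
case 2: goto <L_catch_B>;
default: goto <L_fallthru>;
}

while an ERT_ALLOWED_EXCEPTIONS region becomes a simple equality test
of the filter value against u.allowed.filter.  */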
3234
3235 static unsigned
3236 execute_lower_eh_dispatch (void)
3237 {
3238 basic_block bb;
3239 bool any_rewritten = false;
3240 bool redirected = false;
3241
3242 assign_filter_values ();
3243
3244 FOR_EACH_BB (bb)
3245 {
3246 gimple last = last_stmt (bb);
3247 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
3248 {
3249 redirected |= lower_eh_dispatch (bb, last);
3250 any_rewritten = true;
3251 }
3252 }
3253
3254 if (redirected)
3255 delete_unreachable_blocks ();
3256 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3257 }
3258
3259 static bool
3260 gate_lower_eh_dispatch (void)
3261 {
3262 return cfun->eh->region_tree != NULL;
3263 }
3264
3265 struct gimple_opt_pass pass_lower_eh_dispatch =
3266 {
3267 {
3268 GIMPLE_PASS,
3269 "ehdisp", /* name */
3270 gate_lower_eh_dispatch, /* gate */
3271 execute_lower_eh_dispatch, /* execute */
3272 NULL, /* sub */
3273 NULL, /* next */
3274 0, /* static_pass_number */
3275 TV_TREE_EH, /* tv_id */
3276 PROP_gimple_lcf, /* properties_required */
3277 0, /* properties_provided */
3278 0, /* properties_destroyed */
3279 0, /* todo_flags_start */
3280 TODO_verify_flow /* todo_flags_finish */
3281 }
3282 };
3283 \f
3284 /* Walk statements, see what regions are really referenced and remove
3285 those that are unused. */
3286
3287 static void
3288 remove_unreachable_handlers (void)
3289 {
3290 sbitmap r_reachable, lp_reachable;
3291 eh_region region;
3292 eh_landing_pad lp;
3293 basic_block bb;
3294 int lp_nr, r_nr;
3295
3296 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3297 lp_reachable
3298 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3299 sbitmap_zero (r_reachable);
3300 sbitmap_zero (lp_reachable);
3301
3302 FOR_EACH_BB (bb)
3303 {
3304 gimple_stmt_iterator gsi;
3305
3306 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3307 {
3308 gimple stmt = gsi_stmt (gsi);
3309 lp_nr = lookup_stmt_eh_lp (stmt);
3310
3311 /* Negative LP numbers are MUST_NOT_THROW regions which
3312 are not considered BB enders. */
3313 if (lp_nr < 0)
3314 SET_BIT (r_reachable, -lp_nr);
3315
3316 /* Positive LP numbers are real landing pads, and are BB enders. */
3317 else if (lp_nr > 0)
3318 {
3319 gcc_assert (gsi_one_before_end_p (gsi));
3320 region = get_eh_region_from_lp_number (lp_nr);
3321 SET_BIT (r_reachable, region->index);
3322 SET_BIT (lp_reachable, lp_nr);
3323 }
3324
3325 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3326 switch (gimple_code (stmt))
3327 {
3328 case GIMPLE_RESX:
3329 SET_BIT (r_reachable, gimple_resx_region (stmt));
3330 break;
3331 case GIMPLE_EH_DISPATCH:
3332 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3333 break;
3334 default:
3335 break;
3336 }
3337 }
3338 }
3339
3340 if (dump_file)
3341 {
3342 fprintf (dump_file, "Before removal of unreachable regions:\n");
3343 dump_eh_tree (dump_file, cfun);
3344 fprintf (dump_file, "Reachable regions: ");
3345 dump_sbitmap_file (dump_file, r_reachable);
3346 fprintf (dump_file, "Reachable landing pads: ");
3347 dump_sbitmap_file (dump_file, lp_reachable);
3348 }
3349
3350 for (r_nr = 1;
3351 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3352 if (region && !TEST_BIT (r_reachable, r_nr))
3353 {
3354 if (dump_file)
3355 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3356 remove_eh_handler (region);
3357 }
3358
3359 for (lp_nr = 1;
3360 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3361 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3362 {
3363 if (dump_file)
3364 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3365 remove_eh_landing_pad (lp);
3366 }
3367
3368 if (dump_file)
3369 {
3370 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3371 dump_eh_tree (dump_file, cfun);
3372 fprintf (dump_file, "\n\n");
3373 }
3374
3375 sbitmap_free (r_reachable);
3376 sbitmap_free (lp_reachable);
3377
3378 #ifdef ENABLE_CHECKING
3379 verify_eh_tree (cfun);
3380 #endif
3381 }
3382
3383 /* Remove regions that do not have landing pads. This assumes
3384 that remove_unreachable_handlers has already been run, and
3385 that we've just manipulated the landing pads since then. */
3386
3387 static void
3388 remove_unreachable_handlers_no_lp (void)
3389 {
3390 eh_region r;
3391 int i;
3392
3393 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3394 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
3395 {
3396 if (dump_file)
3397 fprintf (dump_file, "Removing unreachable region %d\n", i);
3398 remove_eh_handler (r);
3399 }
3400 }
3401
3402 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3403 optimistically split all sorts of edges, including EH edges. The
3404 optimization passes in between may not have needed them; if not,
3405 we should undo the split.
3406
3407 Recognize this case by having one EH edge incoming to the BB and
3408 one normal edge outgoing; BB should be empty apart from the
3409 post_landing_pad label.
3410
3411 Note that this is slightly different from the empty handler case
3412 handled by cleanup_empty_eh, in that the actual handler may yet
3413 have actual code but the landing pad has been separated from the
3414 handler. As such, cleanup_empty_eh relies on this transformation
3415 having been done first. */
3416
3417 static bool
3418 unsplit_eh (eh_landing_pad lp)
3419 {
3420 basic_block bb = label_to_block (lp->post_landing_pad);
3421 gimple_stmt_iterator gsi;
3422 edge e_in, e_out;
3423
3424 /* Quickly check the edge counts on BB for singularity. */
3425 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3426 return false;
3427 e_in = EDGE_PRED (bb, 0);
3428 e_out = EDGE_SUCC (bb, 0);
3429
3430 /* Input edge must be EH and output edge must be normal. */
3431 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3432 return false;
3433
3434 /* The block must be empty except for the labels and debug insns. */
3435 gsi = gsi_after_labels (bb);
3436 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3437 gsi_next_nondebug (&gsi);
3438 if (!gsi_end_p (gsi))
3439 return false;
3440
3441 /* The destination block must not already have a landing pad
3442 for a different region. */
3443 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3444 {
3445 gimple stmt = gsi_stmt (gsi);
3446 tree lab;
3447 int lp_nr;
3448
3449 if (gimple_code (stmt) != GIMPLE_LABEL)
3450 break;
3451 lab = gimple_label_label (stmt);
3452 lp_nr = EH_LANDING_PAD_NR (lab);
3453 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3454 return false;
3455 }
3456
3457 /* The new destination block must not already be a destination of
3458 the source block, lest we merge fallthru and EH edges and cause
3459 all sorts of confusion. */
3460 if (find_edge (e_in->src, e_out->dest))
3461 return false;
3462
3463 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3464 thought this should have been cleaned up by a phicprop pass, but
3465 that doesn't appear to handle virtuals. Propagate by hand. */
3466 if (!gimple_seq_empty_p (phi_nodes (bb)))
3467 {
3468 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3469 {
3470 gimple use_stmt, phi = gsi_stmt (gsi);
3471 tree lhs = gimple_phi_result (phi);
3472 tree rhs = gimple_phi_arg_def (phi, 0);
3473 use_operand_p use_p;
3474 imm_use_iterator iter;
3475
3476 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3477 {
3478 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3479 SET_USE (use_p, rhs);
3480 }
3481
3482 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3483 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3484
3485 remove_phi_node (&gsi, true);
3486 }
3487 }
3488
3489 if (dump_file && (dump_flags & TDF_DETAILS))
3490 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3491 lp->index, e_out->dest->index);
3492
3493 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3494 a successor edge, humor it. But do the real CFG change with the
3495 predecessor of E_OUT in order to preserve the ordering of arguments
3496 to the PHI nodes in E_OUT->DEST. */
3497 redirect_eh_edge_1 (e_in, e_out->dest, false);
3498 redirect_edge_pred (e_out, e_in->src);
3499 e_out->flags = e_in->flags;
3500 e_out->probability = e_in->probability;
3501 e_out->count = e_in->count;
3502 remove_edge (e_in);
3503
3504 return true;
3505 }
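
/* Pictorially (a sketch), unsplitting turns

throw stmt --EH--> [bb: post_landing_pad label only] --> join

into

throw stmt --EH--> join

with JOIN's block label taking over the post-landing-pad role.  */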
3506
3507 /* Examine each landing pad block and see if it matches unsplit_eh. */
3508
3509 static bool
3510 unsplit_all_eh (void)
3511 {
3512 bool changed = false;
3513 eh_landing_pad lp;
3514 int i;
3515
3516 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3517 if (lp)
3518 changed |= unsplit_eh (lp);
3519
3520 return changed;
3521 }
3522
3523 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3524 to OLD_BB so that they reach NEW_BB instead; return true on success, false on failure.
3525
3526 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3527 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3528 Virtual PHIs may be deleted and marked for renaming. */
3529
3530 static bool
3531 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3532 edge old_bb_out, bool change_region)
3533 {
3534 gimple_stmt_iterator ngsi, ogsi;
3535 edge_iterator ei;
3536 edge e;
3537 bitmap rename_virts;
3538 bitmap ophi_handled;
3539
3540 FOR_EACH_EDGE (e, ei, old_bb->preds)
3541 redirect_edge_var_map_clear (e);
3542
3543 ophi_handled = BITMAP_ALLOC (NULL);
3544 rename_virts = BITMAP_ALLOC (NULL);
3545
3546 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3547 for the edges we're going to move. */
3548 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3549 {
3550 gimple ophi, nphi = gsi_stmt (ngsi);
3551 tree nresult, nop;
3552
3553 nresult = gimple_phi_result (nphi);
3554 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3555
3556 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3557 the source ssa_name. */
3558 ophi = NULL;
3559 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3560 {
3561 ophi = gsi_stmt (ogsi);
3562 if (gimple_phi_result (ophi) == nop)
3563 break;
3564 ophi = NULL;
3565 }
3566
3567 /* If we did find the corresponding PHI, copy those inputs. */
3568 if (ophi)
3569 {
3570 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3571 if (!has_single_use (nop))
3572 {
3573 imm_use_iterator imm_iter;
3574 use_operand_p use_p;
3575
3576 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3577 {
3578 if (!gimple_debug_bind_p (USE_STMT (use_p))
3579 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3580 || gimple_bb (USE_STMT (use_p)) != new_bb))
3581 goto fail;
3582 }
3583 }
3584 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3585 FOR_EACH_EDGE (e, ei, old_bb->preds)
3586 {
3587 location_t oloc;
3588 tree oop;
3589
3590 if ((e->flags & EDGE_EH) == 0)
3591 continue;
3592 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3593 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3594 redirect_edge_var_map_add (e, nresult, oop, oloc);
3595 }
3596 }
3597 /* If we didn't find the PHI, but it's a VOP, remember to rename
3598 it later, assuming all other tests succeed. */
3599 else if (!is_gimple_reg (nresult))
3600 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3601 /* If we didn't find the PHI, and it's a real variable, we know
3602 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3603 variable is unchanged from input to the block and we can simply
3604 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3605 else
3606 {
3607 location_t nloc
3608 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3609 FOR_EACH_EDGE (e, ei, old_bb->preds)
3610 redirect_edge_var_map_add (e, nresult, nop, nloc);
3611 }
3612 }
3613
3614 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3615 we don't know what values from the other edges into NEW_BB to use. */
3616 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3617 {
3618 gimple ophi = gsi_stmt (ogsi);
3619 tree oresult = gimple_phi_result (ophi);
3620 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3621 goto fail;
3622 }
3623
3624 /* At this point we know that the merge will succeed. Remove the PHI
3625 nodes for the virtuals that we want to rename. */
3626 if (!bitmap_empty_p (rename_virts))
3627 {
3628 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3629 {
3630 gimple nphi = gsi_stmt (ngsi);
3631 tree nresult = gimple_phi_result (nphi);
3632 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3633 {
3634 mark_virtual_phi_result_for_renaming (nphi);
3635 remove_phi_node (&ngsi, true);
3636 }
3637 else
3638 gsi_next (&ngsi);
3639 }
3640 }
3641
3642 /* Finally, move the edges and update the PHIs. */
3643 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3644 if (e->flags & EDGE_EH)
3645 {
3646 redirect_eh_edge_1 (e, new_bb, change_region);
3647 redirect_edge_succ (e, new_bb);
3648 flush_pending_stmts (e);
3649 }
3650 else
3651 ei_next (&ei);
3652
3653 BITMAP_FREE (ophi_handled);
3654 BITMAP_FREE (rename_virts);
3655 return true;
3656
3657 fail:
3658 FOR_EACH_EDGE (e, ei, old_bb->preds)
3659 redirect_edge_var_map_clear (e);
3660 BITMAP_FREE (ophi_handled);
3661 BITMAP_FREE (rename_virts);
3662 return false;
3663 }
3664
3665 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3666 old region to NEW_REGION at BB. */
3667
3668 static void
3669 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3670 eh_landing_pad lp, eh_region new_region)
3671 {
3672 gimple_stmt_iterator gsi;
3673 eh_landing_pad *pp;
3674
3675 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3676 continue;
3677 *pp = lp->next_lp;
3678
3679 lp->region = new_region;
3680 lp->next_lp = new_region->landing_pads;
3681 new_region->landing_pads = lp;
3682
3683 /* Delete the RESX that was matched within the empty handler block. */
3684 gsi = gsi_last_bb (bb);
3685 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3686 gsi_remove (&gsi, true);
3687
3688 /* Clean up E_OUT for the fallthru. */
3689 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3690 e_out->probability = REG_BR_PROB_BASE;
3691 }
3692
3693 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3694 unsplitting than unsplit_eh was prepared to handle, e.g. when
3695 multiple incoming edges and phis are involved. */
3696
3697 static bool
3698 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3699 {
3700 gimple_stmt_iterator gsi;
3701 tree lab;
3702 edge_iterator ei;
3703 edge e;
3704
3705 /* We really ought not have totally lost everything following
3706 a landing pad label. Given that BB is empty, there had better
3707 be a successor. */
3708 gcc_assert (e_out != NULL);
3709
3710 /* The destination block must not already have a landing pad
3711 for a different region. */
3712 lab = NULL;
3713 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3714 {
3715 gimple stmt = gsi_stmt (gsi);
3716 int lp_nr;
3717
3718 if (gimple_code (stmt) != GIMPLE_LABEL)
3719 break;
3720 lab = gimple_label_label (stmt);
3721 lp_nr = EH_LANDING_PAD_NR (lab);
3722 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3723 return false;
3724 }
3725
3726 /* The destination block must not be a regular successor for any
3727 of the preds of the landing pad. Thus, avoid turning
3728 <..>
3729 | \ EH
3730 | <..>
3731 | /
3732 <..>
3733 into
3734 <..>
3735 | | EH
3736 <..>
3737 which CFG verification would choke on. See PR45172. */
3738 FOR_EACH_EDGE (e, ei, bb->preds)
3739 if (find_edge (e->src, e_out->dest))
3740 return false;
3741
3742 /* Attempt to move the PHIs into the successor block. */
3743 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
3744 {
3745 if (dump_file && (dump_flags & TDF_DETAILS))
3746 fprintf (dump_file,
3747 "Unsplit EH landing pad %d to block %i "
3748 "(via cleanup_empty_eh).\n",
3749 lp->index, e_out->dest->index);
3750 return true;
3751 }
3752
3753 return false;
3754 }
3755
3756 /* Return true if edge E_FIRST is part of an empty infinite loop
3757 or leads to such a loop through a series of single successor
3758 empty bbs. */
3759
3760 static bool
3761 infinite_empty_loop_p (edge e_first)
3762 {
3763 bool inf_loop = false;
3764 edge e;
3765
3766 if (e_first->dest == e_first->src)
3767 return true;
3768
3769 e_first->src->aux = (void *) 1;
3770 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
3771 {
3772 gimple_stmt_iterator gsi;
3773 if (e->dest->aux)
3774 {
3775 inf_loop = true;
3776 break;
3777 }
3778 e->dest->aux = (void *) 1;
3779 gsi = gsi_after_labels (e->dest);
3780 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3781 gsi_next_nondebug (&gsi);
3782 if (!gsi_end_p (gsi))
3783 break;
3784 }
3785 e_first->src->aux = NULL;
3786 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
3787 e->dest->aux = NULL;
3788
3789 return inf_loop;
3790 }
3791
3792 /* Examine the block associated with LP to determine if it's an empty
3793 handler for its EH region. If so, attempt to redirect EH edges to
3794 an outer region. Return true if the CFG was updated in any way. This
3795 is similar to jump forwarding, just across EH edges. */
3796
3797 static bool
3798 cleanup_empty_eh (eh_landing_pad lp)
3799 {
3800 basic_block bb = label_to_block (lp->post_landing_pad);
3801 gimple_stmt_iterator gsi;
3802 gimple resx;
3803 eh_region new_region;
3804 edge_iterator ei;
3805 edge e, e_out;
3806 bool has_non_eh_pred;
3807 int new_lp_nr;
3808
3809 /* There can be zero or one edges out of BB. This is the quickest test. */
3810 switch (EDGE_COUNT (bb->succs))
3811 {
3812 case 0:
3813 e_out = NULL;
3814 break;
3815 case 1:
3816 e_out = EDGE_SUCC (bb, 0);
3817 break;
3818 default:
3819 return false;
3820 }
3821 gsi = gsi_after_labels (bb);
3822
3823 /* Make sure to skip debug statements. */
3824 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3825 gsi_next_nondebug (&gsi);
3826
3827 /* If the block is totally empty, look for more unsplitting cases. */
3828 if (gsi_end_p (gsi))
3829 {
3830 /* For the degenerate case of an infinite loop, bail out. */
3831 if (infinite_empty_loop_p (e_out))
3832 return false;
3833
3834 return cleanup_empty_eh_unsplit (bb, e_out, lp);
3835 }
3836
3837 /* The block should consist only of a single RESX statement, modulo a
3838 preceding call to __builtin_stack_restore if there is no outgoing
3839 edge, since the call can be eliminated in this case. */
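/* E.g. (sketch, SSA name invented):

       __builtin_stack_restore (saved_sp_3);
       resx 1;

   in a block with no successor; the restore is dead in that case and
   is stepped over below.  */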
3840 resx = gsi_stmt (gsi);
3841 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
3842 {
3843 gsi_next (&gsi);
3844 resx = gsi_stmt (gsi);
3845 }
3846 if (!is_gimple_resx (resx))
3847 return false;
3848 gcc_assert (gsi_one_before_end_p (gsi));
3849
3850 /* Determine if there are non-EH edges, or resx edges into the handler. */
3851 has_non_eh_pred = false;
3852 FOR_EACH_EDGE (e, ei, bb->preds)
3853 if (!(e->flags & EDGE_EH))
3854 has_non_eh_pred = true;
3855
3856 /* Find the handler that is outside the empty handler by looking at
3857 where the RESX instruction was vectored. */
3858 new_lp_nr = lookup_stmt_eh_lp (resx);
3859 new_region = get_eh_region_from_lp_number (new_lp_nr);
3860
3861 /* If there's no destination region within the current function,
3862 redirection is trivial: remove the throwing statements from
3863 the EH region, remove the EH edges, and allow the block
3864 to go unreachable. */
3865 if (new_region == NULL)
3866 {
3867 gcc_assert (e_out == NULL);
3868 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3869 if (e->flags & EDGE_EH)
3870 {
3871 gimple stmt = last_stmt (e->src);
3872 remove_stmt_from_eh_lp (stmt);
3873 remove_edge (e);
3874 }
3875 else
3876 ei_next (&ei);
3877 goto succeed;
3878 }
3879
3880 /* If the destination region is a MUST_NOT_THROW, allow the runtime
3881 to handle the abort and allow the blocks to go unreachable. */
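/* For instance (illustrative), if the resx targets a region created
   for a C++ "noexcept" function, each throwing predecessor statement
   is re-associated with that MUST_NOT_THROW region directly, and the
   unwinder will invoke the region's failure routine (std::terminate)
   without ever passing through the now-dead handler block.  */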
3882 if (new_region->type == ERT_MUST_NOT_THROW)
3883 {
3884 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3885 if (e->flags & EDGE_EH)
3886 {
3887 gimple stmt = last_stmt (e->src);
3888 remove_stmt_from_eh_lp (stmt);
3889 add_stmt_to_eh_lp (stmt, new_lp_nr);
3890 remove_edge (e);
3891 }
3892 else
3893 ei_next (&ei);
3894 goto succeed;
3895 }
3896
3897 /* Try to redirect the EH edges and merge the PHIs into the destination
3898 landing pad block. If the merge succeeds, we'll already have redirected
3899 all the EH edges. The handler itself will go unreachable if there were
3900 no normal edges. */
3901 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
3902 goto succeed;
3903
3904 /* Finally, if all input edges are EH edges, then we can (potentially)
3905 reduce the number of transfers from the runtime by moving the landing
3906 pad from the original region to the new region. This is a win when
3907 we remove the last CLEANUP region along a particular exception
3908 propagation path. Since nothing changes except for the region with
3909 which the landing pad is associated, the PHI nodes do not need to be
3910 adjusted at all. */
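/* Sketch with invented indices: if landing pad 3 belongs to CLEANUP
   region 5, whose handler merely does "resx" into outer region 2,
   then re-parenting landing pad 3 under region 2 removes one stop
   from the unwinder's search; no CFG or PHI updates are required.  */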
3911 if (!has_non_eh_pred)
3912 {
3913 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
3914 if (dump_file && (dump_flags & TDF_DETAILS))
3915 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
3916 lp->index, new_region->index);
3917
3918 /* ??? The CFG didn't change, but we may have rendered the
3919 old EH region unreachable. Trigger a cleanup there. */
3920 return true;
3921 }
3922
3923 return false;
3924
3925 succeed:
3926 if (dump_file && (dump_flags & TDF_DETAILS))
3927 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
3928 remove_eh_landing_pad (lp);
3929 return true;
3930 }
3931
3932 /* Walk over all landing pads in the function. Examine each
3933 post_landing_pad block and see if we can eliminate it as empty. */
3934
3935 static bool
3936 cleanup_all_empty_eh (void)
3937 {
3938 bool changed = false;
3939 eh_landing_pad lp;
3940 int i;
3941
3942 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3943 if (lp)
3944 changed |= cleanup_empty_eh (lp);
3945
3946 return changed;
3947 }
3948
3949 /* Perform cleanups and lowering of exception handling:
3950 1) cleanup regions whose handlers do nothing are optimized out (sketched below)
3951 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
3952 3) info about which regions contain instructions, and which regions
3953 are reachable via local EH edges, is collected
3954 4) the EH tree is pruned of regions that are no longer necessary.
3955
3956 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
3957 Unify those that have the same failure decl and locus.
3958 */
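/* For example (illustrative C++ source), in

       struct S { ~S () { } };
       void f () { S s; g (); }

   the cleanup region wrapping the call to g () only runs an empty
   destructor; step 1) removes that region, after which the landing
   pad and the EH edge for the call disappear as well.  */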
3959
3960 static unsigned int
3961 execute_cleanup_eh_1 (void)
3962 {
3963 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
3964 looking up unreachable landing pads. */
3965 remove_unreachable_handlers ();
3966
3967 /* Watch out for the region tree vanishing because all regions were unreachable. */
3968 if (cfun->eh->region_tree && optimize)
3969 {
3970 bool changed = false;
3971
3972 changed |= unsplit_all_eh ();
3973 changed |= cleanup_all_empty_eh ();
3974
3975 if (changed)
3976 {
3977 free_dominance_info (CDI_DOMINATORS);
3978 free_dominance_info (CDI_POST_DOMINATORS);
3979
3980 /* We delayed all basic block deletion, as we may have performed
3981 cleanups on EH edges while non-EH edges were still present. */
3982 delete_unreachable_blocks ();
3983
3984 /* We manipulated the landing pads. Remove any region that no
3985 longer has a landing pad. */
3986 remove_unreachable_handlers_no_lp ();
3987
3988 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
3989 }
3990 }
3991
3992 return 0;
3993 }
3994
3995 static unsigned int
3996 execute_cleanup_eh (void)
3997 {
3998 int ret = execute_cleanup_eh_1 ();
3999
4000 /* If the function no longer needs an EH personality routine,
4001 clear it. This exposes cross-language inlining opportunities
4002 and avoids references to a never defined personality routine. */
4003 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4004 && function_needs_eh_personality (cfun) != eh_personality_lang)
4005 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4006
4007 return ret;
4008 }
4009
4010 static bool
4011 gate_cleanup_eh (void)
4012 {
4013 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4014 }
4015
4016 struct gimple_opt_pass pass_cleanup_eh = {
4017 {
4018 GIMPLE_PASS,
4019 "ehcleanup", /* name */
4020 gate_cleanup_eh, /* gate */
4021 execute_cleanup_eh, /* execute */
4022 NULL, /* sub */
4023 NULL, /* next */
4024 0, /* static_pass_number */
4025 TV_TREE_EH, /* tv_id */
4026 PROP_gimple_lcf, /* properties_required */
4027 0, /* properties_provided */
4028 0, /* properties_destroyed */
4029 0, /* todo_flags_start */
4030 0 /* todo_flags_finish */
4031 }
4032 };
4033 \f
4034 /* Verify that BB, which contains STMT as its last statement, has precisely
4035 the edges that make_eh_edges would create. */
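/* E.g. (sketch): if the last statement of BB 3 is a call recorded on
   landing pad 1, then BB 3 must have exactly one EDGE_EH successor,
   and that edge must lead to landing pad 1's post-landing-pad block;
   any other configuration is reported as an error below.  */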
4036
4037 DEBUG_FUNCTION bool
4038 verify_eh_edges (gimple stmt)
4039 {
4040 basic_block bb = gimple_bb (stmt);
4041 eh_landing_pad lp = NULL;
4042 int lp_nr;
4043 edge_iterator ei;
4044 edge e, eh_edge;
4045
4046 lp_nr = lookup_stmt_eh_lp (stmt);
4047 if (lp_nr > 0)
4048 lp = get_eh_landing_pad_from_number (lp_nr);
4049
4050 eh_edge = NULL;
4051 FOR_EACH_EDGE (e, ei, bb->succs)
4052 {
4053 if (e->flags & EDGE_EH)
4054 {
4055 if (eh_edge)
4056 {
4057 error ("BB %i has multiple EH edges", bb->index);
4058 return true;
4059 }
4060 else
4061 eh_edge = e;
4062 }
4063 }
4064
4065 if (lp == NULL)
4066 {
4067 if (eh_edge)
4068 {
4069 error ("BB %i cannot throw but has an EH edge", bb->index);
4070 return true;
4071 }
4072 return false;
4073 }
4074
4075 if (!stmt_could_throw_p (stmt))
4076 {
4077 error ("BB %i last statement has incorrectly set lp", bb->index);
4078 return true;
4079 }
4080
4081 if (eh_edge == NULL)
4082 {
4083 error ("BB %i is missing an EH edge", bb->index);
4084 return true;
4085 }
4086
4087 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4088 {
4089 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4090 return true;
4091 }
4092
4093 return false;
4094 }
4095
4096 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
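/* E.g. (sketch, invented numbers): for

       eh_dispatch 1;

   where try region 1 has catches C1 and C2, the dispatch block must
   have exactly one edge to each catch label, plus one FALLTHRU edge
   for the re-raise path, unless some catch is a catch-all (empty
   type list), in which case no fallthru is expected.  */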
4097
4098 DEBUG_FUNCTION bool
4099 verify_eh_dispatch_edge (gimple stmt)
4100 {
4101 eh_region r;
4102 eh_catch c;
4103 basic_block src, dst;
4104 bool want_fallthru = true;
4105 edge_iterator ei;
4106 edge e, fall_edge;
4107
4108 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4109 src = gimple_bb (stmt);
4110
4111 FOR_EACH_EDGE (e, ei, src->succs)
4112 gcc_assert (e->aux == NULL);
4113
4114 switch (r->type)
4115 {
4116 case ERT_TRY:
4117 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4118 {
4119 dst = label_to_block (c->label);
4120 e = find_edge (src, dst);
4121 if (e == NULL)
4122 {
4123 error ("BB %i is missing an edge", src->index);
4124 return true;
4125 }
4126 e->aux = (void *)e;
4127
4128 /* A catch-all handler doesn't have a fallthru. */
4129 if (c->type_list == NULL)
4130 {
4131 want_fallthru = false;
4132 break;
4133 }
4134 }
4135 break;
4136
4137 case ERT_ALLOWED_EXCEPTIONS:
4138 dst = label_to_block (r->u.allowed.label);
4139 e = find_edge (src, dst);
4140 if (e == NULL)
4141 {
4142 error ("BB %i is missing an edge", src->index);
4143 return true;
4144 }
4145 e->aux = (void *)e;
4146 break;
4147
4148 default:
4149 gcc_unreachable ();
4150 }
4151
4152 fall_edge = NULL;
4153 FOR_EACH_EDGE (e, ei, src->succs)
4154 {
4155 if (e->flags & EDGE_FALLTHRU)
4156 {
4157 if (fall_edge != NULL)
4158 {
4159 error ("BB %i has too many fallthru edges", src->index);
4160 return true;
4161 }
4162 fall_edge = e;
4163 }
4164 else if (e->aux)
4165 e->aux = NULL;
4166 else
4167 {
4168 error ("BB %i has incorrect edge", src->index);
4169 return true;
4170 }
4171 }
4172 if ((fall_edge != NULL) ^ want_fallthru)
4173 {
4174 error ("BB %i has incorrect fallthru edge", src->index);
4175 return true;
4176 }
4177
4178 return false;
4179 }