/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"

/* HACK */
extern int using_eh_for_cleanups_p;
\f
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  /* Discard the low-order bits, which are mostly zero due to alignment.  */
  return (size_t)*x >> 4;
}
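
/* These two are suitable as the callbacks for a pointer-keyed hash
   table; e.g. the tables below are created with
   htab_create (31, struct_ptr_hash, struct_ptr_eq, free).  */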

\f
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  if (num < 0)
    abort ();

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}
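
/* For example, after add_stmt_to_eh_region (stmt, 5), the call
   lookup_stmt_eh_region (stmt) yields 5.  It yields -1 for statements
   that were never recorded, and -2 if the table does not exist yet.  */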

\f
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  if (*slot)
    abort ();
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
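
/* As an illustration (hypothetical source), given

     try {                         <-- TF1
       L1:;
       try {                       <-- TF2
         goto L1;
       } finally { f2 (); }
     } finally { f1 (); }

   L1 was recorded with parent TF1, so outside_finally_tree (L1, TF2)
   climbs L1 -> TF1 -> ... without ever reaching TF2 and returns true:
   the goto leaves TF2's body and must run f2 () on the way out.  */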
\f
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
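
/* Note that bsearch requires a sorted array: lower_try_finally qsorts
   the queue with goto_queue_cmp before any replacements are looked up.  */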

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      abort ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      abort ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
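
/* As an illustration (hypothetical source), for

     try { if (p) goto out; ... } finally { f (); }
     out:;

   the GOTO_EXPR is queued with index 0 and "out" becomes
   dest_array[0]; the try-finally lowering then rewrites the goto so
   that f () runs before control reaches "out".  */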

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      if (outside_finally_tree (lab, tf->try_finally_expr))
        abort ();
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

              int x;
              int foo (void)
              {
                x = 0;
                try {
                  return x;
                } finally {
                  x++;
                }
              }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else if (*return_value_p != ret_expr)
            abort ();
          q->cont_stmt = q->stmt;
        }
      else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
        {
          tree result = TREE_OPERAND (ret_expr, 0);
          tree new, old = TREE_OPERAND (ret_expr, 1);

          if (!*return_value_p)
            {
              if (aggregate_value_p (TREE_TYPE (result),
                                     TREE_TYPE (current_function_decl)))
                /* If this function returns in memory, copy the argument
                   into the return slot now.  Otherwise, we might need to
                   worry about magic return semantics, so we need to use a
                   temporary to hold the value until we're actually ready
                   to return.  */
                new = result;
              else
                new = create_tmp_var (TREE_TYPE (old), "rettmp");
              *return_value_p = new;
            }
          else
            new = *return_value_p;

          x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
          append_to_statement_list (x, &q->repl_stmt);

          if (new == result)
            x = result;
          else
            x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
          q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
        }
      else
        abort ();
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
     try { body; } catch { stuff; }
   to
     body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = lhd_unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

     try {
       finally_block;
     } catch {
       if (fintmp == eh_edge)
         protect_cleanup_actions;
     }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          int index = q->index < 0 ? return_index : q->index;
          tree lab = labels[index];
          bool build_p = false;

          if (!lab)
            {
              labels[index] = lab = create_artificial_label ();
              build_p = true;
            }

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          if (build_p)
            {
              x = build1 (LABEL_EXPR, void_type_node, lab);
              append_to_statement_list (x, &new_stmt);

              x = lower_try_finally_dup_block (finally, state);
              lower_eh_constructs_1 (state, &x);
              append_to_statement_list (x, &new_stmt);

              append_to_statement_list (q->cont_stmt, &new_stmt);
              maybe_record_in_goto_queue (state, q->cont_stmt);
            }
        }
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

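/* A sketch of the index layout computed below: with two recorded goto
   destinations and may_return, may_throw and may_fallthru all true,
   the destinations occupy switch indices 0 and 1, return_index is 2,
   eh_index is 3, fallthru_index is 4, and ndests is 5.  */
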
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (fallthru_index, 0));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (fallthru_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_2 (eh_index, 0));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_2 (eh_index, 0), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_2 (get_eh_region_number (tf->region), 0));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (return_index, 0));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_2 (q->index, 0));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        {
          last_case = build (CASE_LABEL_EXPR, void_type_node,
                             build_int_2 (switch_id, 0), NULL,
                             create_artificial_label ());
          TREE_VEC_ELT (case_label_vec, case_index) = last_case;

          x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
          append_to_statement_list (x, &switch_body);
          append_to_statement_list (q->cont_stmt, &switch_body);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
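
/* A worked example under the made-up weights below: a finally block
   estimated at 10 insns with 3 destinations gives
   f_estimate = (10 + 1) * 3 = 33 and sw_estimate = 10 + 2 * 3 = 16,
   so -Os chooses the switch (33 >= 16) while -O2 still copies,
   since f_estimate < 100.  */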

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate * 2 < sw_estimate;
  else
    return f_estimate < 40 || f_estimate * 3 < sw_estimate * 2;
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);

          /* ??? For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
            record_stmt_eh_region (state->cur_region, TREE_OPERAND (t, 1));
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  PROP_gimple_lcf,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
};

\f
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}

\f
/* Return true if the expr can trap, as in dereferencing an
   invalid pointer location.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree t;

  switch (code)
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      t = get_base_address (expr);
      return !t || TREE_CODE (t) == INDIRECT_REF;

    case INDIRECT_REF:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      return true;

    default:
      break;
    }

  return false;
}
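
/* For example, *p and an integer division x / y can trap, as can a
   COMPONENT_REF such as p->f whose base is a pointer dereference;
   an access rooted at a local aggregate cannot.  */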

bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}
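
/* Roughly: a call can throw unless it is marked ECF_NOTHROW, and with
   -fnon-call-exceptions any trapping expression can throw as well;
   with exceptions disabled nothing throws.  */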

bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

#include "gt-tree-eh.h"