1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39
40 \f
41 /* Nonzero if we are using EH to handle cleanups. */
42 static int using_eh_for_cleanups_p = 0;
43
44 void
45 using_eh_for_cleanups (void)
46 {
47 using_eh_for_cleanups_p = 1;
48 }
49 \f
50 /* Misc functions used in this file. */
51
52 /* Compare and hash for any structure which begins with a canonical
53 pointer. Assumes all pointers are interchangeable, which is sort
54 of already assumed by gcc elsewhere IIRC. */
55
56 static int
57 struct_ptr_eq (const void *a, const void *b)
58 {
59 const void * const * x = a;
60 const void * const * y = b;
61 return *x == *y;
62 }
63
64 static hashval_t
65 struct_ptr_hash (const void *a)
66 {
67 const void * const * x = a;
68 return (size_t)*x >> 4;
69 }
70
71 \f
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
75
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
79
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
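
/* A usage sketch (not a quotation of any particular caller): a lowering
   pass records a potentially-throwing statement with
   add_stmt_to_eh_region (stmt, nr), and later consumers ask
   lookup_stmt_eh_region (stmt), which returns the region number, -1 when
   the statement is not in the table (or there is no cfun), and -2 when
   the function has no throw-statement table at all.  */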
83
84 static void
85 record_stmt_eh_region (struct eh_region *region, tree t)
86 {
87 struct throw_stmt_node *n;
88 void **slot;
89
90 if (!region)
91 return;
92
93 n = ggc_alloc (sizeof (*n));
94 n->stmt = t;
95 n->region_nr = get_eh_region_number (region);
96
97 slot = htab_find_slot (get_eh_throw_stmt_table (cfun), n, INSERT);
98 gcc_assert (!*slot);
99 *slot = n;
100 }
101
102 void
103 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
104 {
105 struct throw_stmt_node *n;
106 void **slot;
107
108 gcc_assert (num >= 0);
109 gcc_assert (TREE_CODE (t) != RESX_EXPR);
110
111 n = ggc_alloc (sizeof (*n));
112 n->stmt = t;
113 n->region_nr = num;
114
115 if (!get_eh_throw_stmt_table (ifun))
116 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
117 struct_ptr_eq,
118 ggc_free));
119
120 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
121 gcc_assert (!*slot);
122 *slot = n;
123 }
124
125 void
126 add_stmt_to_eh_region (tree t, int num)
127 {
128 add_stmt_to_eh_region_fn (cfun, t, num);
129 }
130
131 bool
132 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
133 {
134 struct throw_stmt_node dummy;
135 void **slot;
136
137 if (!get_eh_throw_stmt_table (ifun))
138 return false;
139
140 dummy.stmt = t;
141 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
142 NO_INSERT);
143 if (slot)
144 {
145 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
146 return true;
147 }
148 else
149 return false;
150 }
151
152 bool
153 remove_stmt_from_eh_region (tree t)
154 {
155 return remove_stmt_from_eh_region_fn (cfun, t);
156 }
157
158 int
159 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
160 {
161 struct throw_stmt_node *p, n;
162
163 if (!get_eh_throw_stmt_table (ifun))
164 return -2;
165
166 n.stmt = t;
167 p = htab_find (get_eh_throw_stmt_table (ifun), &n);
168
169 return (p ? p->region_nr : -1);
170 }
171
172 int
173 lookup_stmt_eh_region (tree t)
174 {
175 /* We can get called from initialized data when -fnon-call-exceptions
176 is on; prevent crash. */
177 if (!cfun)
178 return -1;
179 return lookup_stmt_eh_region_fn (cfun, t);
180 }
181
182 \f
183 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
184 nodes and LABEL_DECL nodes. We will use this during the second phase to
185 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
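
/* An illustrative sketch (not real compiler input): given

	try {			<-- T1
	    L1:
	    try {		<-- T2
		L2:
	    } finally { ... }
	} finally { ... }

   we record L1 -> T1, T2 -> T1 and L2 -> T2.  outside_finally_tree then
   decides whether a goto with destination LAB escapes T2 by walking LAB's
   parent links; if the walk runs off the chain before reaching T2, the
   jump leaves the try_finally.  */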
186
187 struct finally_tree_node
188 {
189 tree child, parent;
190 };
191
192 /* Note that this table is *not* marked GTY. It is short-lived. */
193 static htab_t finally_tree;
194
195 static void
196 record_in_finally_tree (tree child, tree parent)
197 {
198 struct finally_tree_node *n;
199 void **slot;
200
201 n = xmalloc (sizeof (*n));
202 n->child = child;
203 n->parent = parent;
204
205 slot = htab_find_slot (finally_tree, n, INSERT);
206 gcc_assert (!*slot);
207 *slot = n;
208 }
209
210 static void
211 collect_finally_tree (tree t, tree region)
212 {
213 tailrecurse:
214 switch (TREE_CODE (t))
215 {
216 case LABEL_EXPR:
217 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
218 break;
219
220 case TRY_FINALLY_EXPR:
221 record_in_finally_tree (t, region);
222 collect_finally_tree (TREE_OPERAND (t, 0), t);
223 t = TREE_OPERAND (t, 1);
224 goto tailrecurse;
225
226 case TRY_CATCH_EXPR:
227 collect_finally_tree (TREE_OPERAND (t, 0), region);
228 t = TREE_OPERAND (t, 1);
229 goto tailrecurse;
230
231 case CATCH_EXPR:
232 t = CATCH_BODY (t);
233 goto tailrecurse;
234
235 case EH_FILTER_EXPR:
236 t = EH_FILTER_FAILURE (t);
237 goto tailrecurse;
238
239 case STATEMENT_LIST:
240 {
241 tree_stmt_iterator i;
242 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
243 collect_finally_tree (tsi_stmt (i), region);
244 }
245 break;
246
247 default:
248 /* A type, a decl, or some kind of statement that we're not
249 interested in. Don't walk them. */
250 break;
251 }
252 }
253
254 /* Use the finally tree to determine if a jump from START to TARGET
255 would leave the try_finally node that START lives in. */
256
257 static bool
258 outside_finally_tree (tree start, tree target)
259 {
260 struct finally_tree_node n, *p;
261
262 do
263 {
264 n.child = start;
265 p = htab_find (finally_tree, &n);
266 if (!p)
267 return true;
268 start = p->parent;
269 }
270 while (start != target);
271
272 return false;
273 }
274 \f
275 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
276 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
 277    The eh region creation is straightforward, but frobbing all the gotos
278 and such into shape isn't. */
279
280 /* State of the world while lowering. */
281
282 struct leh_state
283 {
284 /* What's "current" while constructing the eh region tree. These
285 correspond to variables of the same name in cfun->eh, which we
286 don't have easy access to. */
287 struct eh_region *cur_region;
288 struct eh_region *prev_try;
289
290 /* Processing of TRY_FINALLY requires a bit more state. This is
291 split out into a separate structure so that we don't have to
292 copy so much when processing other nodes. */
293 struct leh_tf_state *tf;
294 };
295
296 struct leh_tf_state
297 {
298 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
299 is the original TRY_FINALLY_EXPR. We need to retain this so that
300 outside_finally_tree can reliably reference the tree used in the
301 collect_finally_tree data structures. */
302 tree try_finally_expr;
303 tree *top_p;
304
305 /* The state outside this try_finally node. */
306 struct leh_state *outer;
307
308 /* The exception region created for it. */
309 struct eh_region *region;
310
 311   /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
312 that are seen to escape this TRY_FINALLY_EXPR node. */
313 struct goto_queue_node {
314 tree stmt;
315 tree repl_stmt;
316 tree cont_stmt;
317 int index;
318 } *goto_queue;
319 size_t goto_queue_size;
320 size_t goto_queue_active;
321
322 /* The set of unique labels seen as entries in the goto queue. */
323 varray_type dest_array;
324
325 /* A label to be added at the end of the completed transformed
326 sequence. It will be set if may_fallthru was true *at one time*,
327 though subsequent transformations may have cleared that flag. */
328 tree fallthru_label;
329
330 /* A label that has been registered with except.c to be the
331 landing pad for this try block. */
332 tree eh_label;
333
334 /* True if it is possible to fall out the bottom of the try block.
335 Cleared if the fallthru is converted to a goto. */
336 bool may_fallthru;
337
338 /* True if any entry in goto_queue is a RETURN_EXPR. */
339 bool may_return;
340
341 /* True if the finally block can receive an exception edge.
342 Cleared if the exception case is handled by code duplication. */
343 bool may_throw;
344 };
345
346 static void lower_eh_filter (struct leh_state *, tree *);
347 static void lower_eh_constructs_1 (struct leh_state *, tree *);
348
349 /* Comparison function for qsort/bsearch. We're interested in
350 searching goto queue elements for source statements. */
351
352 static int
353 goto_queue_cmp (const void *x, const void *y)
354 {
355 tree a = ((const struct goto_queue_node *)x)->stmt;
356 tree b = ((const struct goto_queue_node *)y)->stmt;
357 return (a == b ? 0 : a < b ? -1 : 1);
358 }
359
360 /* Search for STMT in the goto queue. Return the replacement,
361 or null if the statement isn't in the queue. */
362
363 static tree
364 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
365 {
366 struct goto_queue_node tmp, *ret;
367 tmp.stmt = stmt;
368 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
369 sizeof (struct goto_queue_node), goto_queue_cmp);
370 return (ret ? ret->repl_stmt : NULL);
371 }
372
373 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
374 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
375 then we can just splat it in, otherwise we add the new stmts immediately
376 after the COND_EXPR and redirect. */
377
378 static void
379 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
380 tree_stmt_iterator *tsi)
381 {
382 tree new, one, label;
383
384 new = find_goto_replacement (tf, *tp);
385 if (!new)
386 return;
387
388 one = expr_only (new);
389 if (one && TREE_CODE (one) == GOTO_EXPR)
390 {
391 *tp = one;
392 return;
393 }
394
395 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
396 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
397
398 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
399 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
400 }
401
402 /* The real work of replace_goto_queue. Returns with TSI updated to
403 point to the next statement. */
404
405 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
406
407 static void
408 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
409 {
410 switch (TREE_CODE (t))
411 {
412 case GOTO_EXPR:
413 case RETURN_EXPR:
414 t = find_goto_replacement (tf, t);
415 if (t)
416 {
417 tsi_link_before (tsi, t, TSI_SAME_STMT);
418 tsi_delink (tsi);
419 return;
420 }
421 break;
422
423 case COND_EXPR:
424 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
425 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
426 break;
427
428 case TRY_FINALLY_EXPR:
429 case TRY_CATCH_EXPR:
430 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
431 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
432 break;
433 case CATCH_EXPR:
434 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
435 break;
436 case EH_FILTER_EXPR:
437 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
438 break;
439
440 case STATEMENT_LIST:
441 gcc_unreachable ();
442
443 default:
444 /* These won't have gotos in them. */
445 break;
446 }
447
448 tsi_next (tsi);
449 }
450
451 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
452
453 static void
454 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
455 {
456 tree_stmt_iterator i = tsi_start (t);
457 while (!tsi_end_p (i))
458 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
459 }
460
461 /* Replace all goto queue members. */
462
463 static void
464 replace_goto_queue (struct leh_tf_state *tf)
465 {
466 if (tf->goto_queue_active == 0)
467 return;
468 replace_goto_queue_stmt_list (*tf->top_p, tf);
469 }
470
471 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
472 node, and if so record that fact in the goto queue associated with that
473 try_finally node. */
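
/* For example (a sketch, with invented labels), in

	try {
	    if (cond) goto out;
	    ...
	} finally { cleanup; }
	out:;

   the goto targets a label outside the TRY_FINALLY_EXPR, so it is queued
   here and later redirected through the finally block by one of the
   lower_try_finally_* routines.  A goto whose destination stays inside
   the try body is left untouched.  */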
474
475 static void
476 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
477 {
478 struct leh_tf_state *tf = state->tf;
479 struct goto_queue_node *q;
480 size_t active, size;
481 int index;
482
483 if (!tf)
484 return;
485
486 switch (TREE_CODE (stmt))
487 {
488 case GOTO_EXPR:
489 {
490 tree lab = GOTO_DESTINATION (stmt);
491
492 /* Computed and non-local gotos do not get processed. Given
493 their nature we can neither tell whether we've escaped the
494 finally block nor redirect them if we knew. */
495 if (TREE_CODE (lab) != LABEL_DECL)
496 return;
497
498 /* No need to record gotos that don't leave the try block. */
499 if (! outside_finally_tree (lab, tf->try_finally_expr))
500 return;
501
502 if (! tf->dest_array)
503 {
504 VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
505 VARRAY_PUSH_TREE (tf->dest_array, lab);
506 index = 0;
507 }
508 else
509 {
510 int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
511 for (index = 0; index < n; ++index)
512 if (VARRAY_TREE (tf->dest_array, index) == lab)
513 break;
514 if (index == n)
515 VARRAY_PUSH_TREE (tf->dest_array, lab);
516 }
517 }
518 break;
519
520 case RETURN_EXPR:
521 tf->may_return = true;
522 index = -1;
523 break;
524
525 default:
526 gcc_unreachable ();
527 }
528
529 active = tf->goto_queue_active;
530 size = tf->goto_queue_size;
531 if (active >= size)
532 {
533 size = (size ? size * 2 : 32);
534 tf->goto_queue_size = size;
535 tf->goto_queue
536 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
537 }
538
539 q = &tf->goto_queue[active];
540 tf->goto_queue_active = active + 1;
541
542 memset (q, 0, sizeof (*q));
543 q->stmt = stmt;
544 q->index = index;
545 }
546
547 #ifdef ENABLE_CHECKING
548 /* We do not process SWITCH_EXPRs for now. As long as the original source
549 was in fact structured, and we've not yet done jump threading, then none
550 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
551
552 static void
553 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
554 {
555 struct leh_tf_state *tf = state->tf;
556 size_t i, n;
557 tree vec;
558
559 if (!tf)
560 return;
561
562 vec = SWITCH_LABELS (switch_expr);
563 n = TREE_VEC_LENGTH (vec);
564
565 for (i = 0; i < n; ++i)
566 {
567 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
568 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
569 }
570 }
571 #else
572 #define verify_norecord_switch_expr(state, switch_expr)
573 #endif
574
 575 /* Redirect the RETURN_EXPR recorded in goto queue node Q to FINLAB.  Place
 576    in Q->CONT_STMT whatever is needed to finish the return.  If MOD is
 577    non-null, insert it before the new branch.  RETURN_VALUE_P is a cache
 578    containing a temporary variable used in manipulating the value returned
     from the function.  */
579
580 static void
581 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
582 tree *return_value_p)
583 {
584 tree ret_expr = TREE_OPERAND (q->stmt, 0);
585 tree x;
586
587 if (ret_expr)
588 {
589 /* The nasty part about redirecting the return value is that the
590 return value itself is to be computed before the FINALLY block
591 is executed. e.g.
592
593 int x;
594 int foo (void)
595 {
596 x = 0;
597 try {
598 return x;
599 } finally {
600 x++;
601 }
602 }
603
 604      should return 0, not 1.  Arrange for this to happen by copying the
 605      computed return value into a local temporary.  This also
606 allows us to redirect multiple return statements through the
607 same destination block; whether this is a net win or not really
608 depends, I guess, but it does make generation of the switch in
609 lower_try_finally_switch easier. */
610
611 switch (TREE_CODE (ret_expr))
612 {
613 case RESULT_DECL:
614 if (!*return_value_p)
615 *return_value_p = ret_expr;
616 else
617 gcc_assert (*return_value_p == ret_expr);
618 q->cont_stmt = q->stmt;
619 break;
620
621 case MODIFY_EXPR:
622 {
623 tree result = TREE_OPERAND (ret_expr, 0);
624 tree new, old = TREE_OPERAND (ret_expr, 1);
625
626 if (!*return_value_p)
627 {
628 if (aggregate_value_p (TREE_TYPE (result),
629 TREE_TYPE (current_function_decl)))
630 /* If this function returns in memory, copy the argument
631 into the return slot now. Otherwise, we might need to
632 worry about magic return semantics, so we need to use a
633 temporary to hold the value until we're actually ready
634 to return. */
635 new = result;
636 else
637 new = create_tmp_var (TREE_TYPE (old), "rettmp");
638 *return_value_p = new;
639 }
640 else
641 new = *return_value_p;
642
643 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
644 append_to_statement_list (x, &q->repl_stmt);
645
646 if (new == result)
647 x = result;
648 else
649 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
650 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
651 }
 652	  break;
653 default:
654 gcc_unreachable ();
655 }
656 }
657 else
658 {
659 /* If we don't return a value, all return statements are the same. */
660 q->cont_stmt = q->stmt;
661 }
662
663 if (mod)
664 append_to_statement_list (mod, &q->repl_stmt);
665
666 x = build1 (GOTO_EXPR, void_type_node, finlab);
667 append_to_statement_list (x, &q->repl_stmt);
668 }
669
670 /* Similar, but easier, for GOTO_EXPR. */
671
672 static void
673 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
674 {
675 tree x;
676
677 q->cont_stmt = q->stmt;
678 if (mod)
679 append_to_statement_list (mod, &q->repl_stmt);
680
681 x = build1 (GOTO_EXPR, void_type_node, finlab);
682 append_to_statement_list (x, &q->repl_stmt);
683 }
684
685 /* We want to transform
686 try { body; } catch { stuff; }
687 to
688 body; goto over; lab: stuff; over:
689
690 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
691 should be placed before the second operand, or NULL. OVER is
692 an existing label that should be put at the exit, or NULL. */
693
694 static void
695 frob_into_branch_around (tree *tp, tree lab, tree over)
696 {
697 tree x, op1;
698
699 op1 = TREE_OPERAND (*tp, 1);
700 *tp = TREE_OPERAND (*tp, 0);
701
702 if (block_may_fallthru (*tp))
703 {
704 if (!over)
705 over = create_artificial_label ();
706 x = build1 (GOTO_EXPR, void_type_node, over);
707 append_to_statement_list (x, tp);
708 }
709
710 if (lab)
711 {
712 x = build1 (LABEL_EXPR, void_type_node, lab);
713 append_to_statement_list (x, tp);
714 }
715
716 append_to_statement_list (op1, tp);
717
718 if (over)
719 {
720 x = build1 (LABEL_EXPR, void_type_node, over);
721 append_to_statement_list (x, tp);
722 }
723 }
724
725 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
726 Make sure to record all new labels found. */
727
728 static tree
729 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
730 {
731 tree region = NULL;
732
733 t = unsave_expr_now (t);
734
735 if (outer_state->tf)
736 region = outer_state->tf->try_finally_expr;
737 collect_finally_tree (t, region);
738
739 return t;
740 }
741
742 /* A subroutine of lower_try_finally. Create a fallthru label for
743 the given try_finally state. The only tricky bit here is that
744 we have to make sure to record the label in our outer context. */
745
746 static tree
747 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
748 {
749 tree label = tf->fallthru_label;
750 if (!label)
751 {
752 label = create_artificial_label ();
753 tf->fallthru_label = label;
754 if (tf->outer->tf)
755 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
756 }
757 return label;
758 }
759
760 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
761 returns non-null, then the language requires that the exception path out
762 of a try_finally be treated specially. To wit: the code within the
763 finally block may not itself throw an exception. We have two choices here.
764 First we can duplicate the finally block and wrap it in a must_not_throw
765 region. Second, we can generate code like
766
767 try {
768 finally_block;
769 } catch {
770 if (fintmp == eh_edge)
771 protect_cleanup_actions;
772 }
773
774 where "fintmp" is the temporary used in the switch statement generation
775 alternative considered below. For the nonce, we always choose the first
776 option.
777
778 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
779
780 static void
781 honor_protect_cleanup_actions (struct leh_state *outer_state,
782 struct leh_state *this_state,
783 struct leh_tf_state *tf)
784 {
785 tree protect_cleanup_actions, finally, x;
786 tree_stmt_iterator i;
787 bool finally_may_fallthru;
788
789 /* First check for nothing to do. */
790 if (lang_protect_cleanup_actions)
791 protect_cleanup_actions = lang_protect_cleanup_actions ();
792 else
793 protect_cleanup_actions = NULL;
794
795 finally = TREE_OPERAND (*tf->top_p, 1);
796
797 /* If the EH case of the finally block can fall through, this may be a
798 structure of the form
799 try {
800 try {
801 throw ...;
802 } cleanup {
803 try {
804 throw ...;
805 } catch (...) {
806 }
807 }
808 } catch (...) {
809 yyy;
810 }
811 E.g. with an inline destructor with an embedded try block. In this
812 case we must save the runtime EH data around the nested exception.
813
814 This complication means that any time the previous runtime data might
815 be used (via fallthru from the finally) we handle the eh case here,
816 whether or not protect_cleanup_actions is active. */
817
818 finally_may_fallthru = block_may_fallthru (finally);
819 if (!finally_may_fallthru && !protect_cleanup_actions)
820 return;
821
822 /* Duplicate the FINALLY block. Only need to do this for try-finally,
823 and not for cleanups. */
824 if (this_state)
825 finally = lower_try_finally_dup_block (finally, outer_state);
826
827 /* Resume execution after the exception. Adding this now lets
828 lower_eh_filter not add unnecessary gotos, as it is clear that
829 we never fallthru from this copy of the finally block. */
830 if (finally_may_fallthru)
831 {
832 tree save_eptr, save_filt;
833
834 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
835 save_filt = create_tmp_var (integer_type_node, "save_filt");
836
837 i = tsi_start (finally);
838 x = build (EXC_PTR_EXPR, ptr_type_node);
839 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
840 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
841
842 x = build (FILTER_EXPR, integer_type_node);
843 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
844 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
845
846 i = tsi_last (finally);
847 x = build (EXC_PTR_EXPR, ptr_type_node);
848 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
849 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
850
851 x = build (FILTER_EXPR, integer_type_node);
852 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
853 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
854
855 x = build_resx (get_eh_region_number (tf->region));
856 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
857 }
858
859 /* Wrap the block with protect_cleanup_actions as the action. */
860 if (protect_cleanup_actions)
861 {
862 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
863 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
864 EH_FILTER_MUST_NOT_THROW (x) = 1;
865 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
866 lower_eh_filter (outer_state, &finally);
867 }
868 else
869 lower_eh_constructs_1 (outer_state, &finally);
870
871 /* Hook this up to the end of the existing try block. If we
872 previously fell through the end, we'll have to branch around.
873 This means adding a new goto, and adding it to the queue. */
874
875 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
876
877 if (tf->may_fallthru)
878 {
879 x = lower_try_finally_fallthru_label (tf);
880 x = build1 (GOTO_EXPR, void_type_node, x);
881 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
882
883 if (this_state)
884 maybe_record_in_goto_queue (this_state, x);
885
886 tf->may_fallthru = false;
887 }
888
889 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
890 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
891 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
892
893 /* Having now been handled, EH isn't to be considered with
894 the rest of the outgoing edges. */
895 tf->may_throw = false;
896 }
897
898 /* A subroutine of lower_try_finally. We have determined that there is
899 no fallthru edge out of the finally block. This means that there is
900 no outgoing edge corresponding to any incoming edge. Restructure the
901 try_finally node for this special case. */
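
/* A rough sketch of the result (labels invented): when the finally block
   cannot fall through, e.g. because it ends in a return or an infinite
   loop,

	try { body; } finally { fin; }

   becomes approximately

	body; L: fin;

   with every escaping goto or return in BODY rewritten into "goto L",
   and L doubling as the landing pad label when the region may throw.  */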
902
903 static void
904 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
905 {
906 tree x, finally, lab, return_val;
907 struct goto_queue_node *q, *qe;
908
909 if (tf->may_throw)
910 lab = tf->eh_label;
911 else
912 lab = create_artificial_label ();
913
914 finally = TREE_OPERAND (*tf->top_p, 1);
915 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
916
917 x = build1 (LABEL_EXPR, void_type_node, lab);
918 append_to_statement_list (x, tf->top_p);
919
920 return_val = NULL;
921 q = tf->goto_queue;
922 qe = q + tf->goto_queue_active;
923 for (; q < qe; ++q)
924 if (q->index < 0)
925 do_return_redirection (q, lab, NULL, &return_val);
926 else
927 do_goto_redirection (q, lab, NULL);
928
929 replace_goto_queue (tf);
930
931 lower_eh_constructs_1 (state, &finally);
932 append_to_statement_list (finally, tf->top_p);
933 }
934
935 /* A subroutine of lower_try_finally. We have determined that there is
936 exactly one destination of the finally block. Restructure the
937 try_finally node for this special case. */
938
939 static void
940 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
941 {
942 struct goto_queue_node *q, *qe;
943 tree x, finally, finally_label;
944
945 finally = TREE_OPERAND (*tf->top_p, 1);
946 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
947
948 lower_eh_constructs_1 (state, &finally);
949
950 if (tf->may_throw)
951 {
952 /* Only reachable via the exception edge. Add the given label to
953 the head of the FINALLY block. Append a RESX at the end. */
954
955 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
956 append_to_statement_list (x, tf->top_p);
957
958 append_to_statement_list (finally, tf->top_p);
959
960 x = build_resx (get_eh_region_number (tf->region));
961
962 append_to_statement_list (x, tf->top_p);
963
964 return;
965 }
966
967 if (tf->may_fallthru)
968 {
969 /* Only reachable via the fallthru edge. Do nothing but let
970 the two blocks run together; we'll fall out the bottom. */
971 append_to_statement_list (finally, tf->top_p);
972 return;
973 }
974
975 finally_label = create_artificial_label ();
976 x = build1 (LABEL_EXPR, void_type_node, finally_label);
977 append_to_statement_list (x, tf->top_p);
978
979 append_to_statement_list (finally, tf->top_p);
980
981 q = tf->goto_queue;
982 qe = q + tf->goto_queue_active;
983
984 if (tf->may_return)
985 {
986 /* Reachable by return expressions only. Redirect them. */
987 tree return_val = NULL;
988 for (; q < qe; ++q)
989 do_return_redirection (q, finally_label, NULL, &return_val);
990 replace_goto_queue (tf);
991 }
992 else
993 {
994 /* Reachable by goto expressions only. Redirect them. */
995 for (; q < qe; ++q)
996 do_goto_redirection (q, finally_label, NULL);
997 replace_goto_queue (tf);
998
999 if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
1000 {
1001 /* Reachable by goto to fallthru label only. Redirect it
1002 to the new label (already created, sadly), and do not
1003 emit the final branch out, or the fallthru label. */
1004 tf->fallthru_label = NULL;
1005 return;
1006 }
1007 }
1008
1009 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1010 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1011 }
1012
1013 /* A subroutine of lower_try_finally. There are multiple edges incoming
1014 and outgoing from the finally block. Implement this by duplicating the
1015 finally block for every destination. */
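
/* Rough shape of the output (purely illustrative): with a fallthru edge
   and one escaping "goto D", we emit

	body;
	fin;  goto <fallthru_label>;
	L_D:  fin;  goto D;

   where the escaping goto in BODY has been redirected to L_D, so every
   copy of the finally block is followed directly by the branch to its
   own destination (or by a RESX for the EH edge).  */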
1016
1017 static void
1018 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1019 {
1020 tree finally, new_stmt;
1021 tree x;
1022
1023 finally = TREE_OPERAND (*tf->top_p, 1);
1024 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1025
1026 new_stmt = NULL_TREE;
1027
1028 if (tf->may_fallthru)
1029 {
1030 x = lower_try_finally_dup_block (finally, state);
1031 lower_eh_constructs_1 (state, &x);
1032 append_to_statement_list (x, &new_stmt);
1033
1034 x = lower_try_finally_fallthru_label (tf);
1035 x = build1 (GOTO_EXPR, void_type_node, x);
1036 append_to_statement_list (x, &new_stmt);
1037 }
1038
1039 if (tf->may_throw)
1040 {
1041 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1042 append_to_statement_list (x, &new_stmt);
1043
1044 x = lower_try_finally_dup_block (finally, state);
1045 lower_eh_constructs_1 (state, &x);
1046 append_to_statement_list (x, &new_stmt);
1047
1048 x = build_resx (get_eh_region_number (tf->region));
1049 append_to_statement_list (x, &new_stmt);
1050 }
1051
1052 if (tf->goto_queue)
1053 {
1054 struct goto_queue_node *q, *qe;
1055 tree return_val = NULL;
1056 int return_index, index;
1057 struct
1058 {
1059 struct goto_queue_node *q;
1060 tree label;
1061 } *labels;
1062
1063 if (tf->dest_array)
1064 return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
1065 else
1066 return_index = 0;
1067 labels = xcalloc (sizeof (*labels), return_index + 1);
1068
1069 q = tf->goto_queue;
1070 qe = q + tf->goto_queue_active;
1071 for (; q < qe; q++)
1072 {
1073 index = q->index < 0 ? return_index : q->index;
1074
1075 if (!labels[index].q)
1076 labels[index].q = q;
1077 }
1078
1079 for (index = 0; index < return_index + 1; index++)
1080 {
1081 tree lab;
1082
1083 q = labels[index].q;
1084 if (! q)
1085 continue;
1086
1087 lab = labels[index].label = create_artificial_label ();
1088
1089 if (index == return_index)
1090 do_return_redirection (q, lab, NULL, &return_val);
1091 else
1092 do_goto_redirection (q, lab, NULL);
1093
1094 x = build1 (LABEL_EXPR, void_type_node, lab);
1095 append_to_statement_list (x, &new_stmt);
1096
1097 x = lower_try_finally_dup_block (finally, state);
1098 lower_eh_constructs_1 (state, &x);
1099 append_to_statement_list (x, &new_stmt);
1100
1101 append_to_statement_list (q->cont_stmt, &new_stmt);
1102 maybe_record_in_goto_queue (state, q->cont_stmt);
1103 }
1104
1105 for (q = tf->goto_queue; q < qe; q++)
1106 {
1107 tree lab;
1108
1109 index = q->index < 0 ? return_index : q->index;
1110
1111 if (labels[index].q == q)
1112 continue;
1113
1114 lab = labels[index].label;
1115
1116 if (index == return_index)
1117 do_return_redirection (q, lab, NULL, &return_val);
1118 else
1119 do_goto_redirection (q, lab, NULL);
1120 }
1121
1122 replace_goto_queue (tf);
1123 free (labels);
1124 }
1125
 1126   /* Link the new statements only after running replace_goto_queue, so
 1127      that we do not process the same goto statements twice.  */
1128 append_to_statement_list (new_stmt, tf->top_p);
1129 }
1130
1131 /* A subroutine of lower_try_finally. There are multiple edges incoming
1132 and outgoing from the finally block. Implement this by instrumenting
1133 each incoming edge and creating a switch statement at the end of the
1134 finally block that branches to the appropriate destination. */
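
/* A sketch of the generated form (names and indices invented here):

	body, where each exit does:  finally_tmp = <index>; goto <finally_label>;
	<finally_label>:
	fin;
	switch (finally_tmp)
	  {
	    case <fallthru_index>:  goto <fallthru_label>;
	    case <eh_index>:        resx;
	    default:                goto D;	<-- one per goto/return dest
	  }

   so the finally code is emitted exactly once and the switch fans control
   back out to the recorded destinations.  */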
1135
1136 static void
1137 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1138 {
1139 struct goto_queue_node *q, *qe;
1140 tree return_val = NULL;
1141 tree finally, finally_tmp, finally_label;
1142 int return_index, eh_index, fallthru_index;
1143 int nlabels, ndests, j, last_case_index;
1144 tree case_label_vec, switch_stmt, last_case, switch_body;
1145 tree x;
1146
1147 /* Mash the TRY block to the head of the chain. */
1148 finally = TREE_OPERAND (*tf->top_p, 1);
1149 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1150
1151 /* Lower the finally block itself. */
1152 lower_eh_constructs_1 (state, &finally);
1153
1154 /* Prepare for switch statement generation. */
1155 if (tf->dest_array)
1156 nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
1157 else
1158 nlabels = 0;
1159 return_index = nlabels;
1160 eh_index = return_index + tf->may_return;
1161 fallthru_index = eh_index + tf->may_throw;
1162 ndests = fallthru_index + tf->may_fallthru;
1163
1164 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1165 finally_label = create_artificial_label ();
1166
1167 case_label_vec = make_tree_vec (ndests);
1168 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1169 NULL_TREE, case_label_vec);
1170 switch_body = NULL;
1171 last_case = NULL;
1172 last_case_index = 0;
1173
1174 /* Begin inserting code for getting to the finally block. Things
 1175      are done in this order to correspond to the sequence in which the
 1176      code is laid out.  */
1177
1178 if (tf->may_fallthru)
1179 {
1180 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1181 build_int_cst (NULL_TREE, fallthru_index));
1182 append_to_statement_list (x, tf->top_p);
1183
1184 if (tf->may_throw)
1185 {
1186 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1187 append_to_statement_list (x, tf->top_p);
1188 }
1189
1190
1191 last_case = build (CASE_LABEL_EXPR, void_type_node,
1192 build_int_cst (NULL_TREE, fallthru_index), NULL,
1193 create_artificial_label ());
1194 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1195 last_case_index++;
1196
1197 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1198 append_to_statement_list (x, &switch_body);
1199
1200 x = lower_try_finally_fallthru_label (tf);
1201 x = build1 (GOTO_EXPR, void_type_node, x);
1202 append_to_statement_list (x, &switch_body);
1203 }
1204
1205 if (tf->may_throw)
1206 {
1207 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1208 append_to_statement_list (x, tf->top_p);
1209
1210 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1211 build_int_cst (NULL_TREE, eh_index));
1212 append_to_statement_list (x, tf->top_p);
1213
1214 last_case = build (CASE_LABEL_EXPR, void_type_node,
1215 build_int_cst (NULL_TREE, eh_index), NULL,
1216 create_artificial_label ());
1217 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1218 last_case_index++;
1219
1220 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1221 append_to_statement_list (x, &switch_body);
1222 x = build_resx (get_eh_region_number (tf->region));
1223 append_to_statement_list (x, &switch_body);
1224 }
1225
1226 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1227 append_to_statement_list (x, tf->top_p);
1228
1229 append_to_statement_list (finally, tf->top_p);
1230
1231 /* Redirect each incoming goto edge. */
1232 q = tf->goto_queue;
1233 qe = q + tf->goto_queue_active;
1234 j = last_case_index + tf->may_return;
1235 for (; q < qe; ++q)
1236 {
1237 tree mod;
1238 int switch_id, case_index;
1239
1240 if (q->index < 0)
1241 {
1242 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1243 build_int_cst (NULL_TREE, return_index));
1244 do_return_redirection (q, finally_label, mod, &return_val);
1245 switch_id = return_index;
1246 }
1247 else
1248 {
1249 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1250 build_int_cst (NULL_TREE, q->index));
1251 do_goto_redirection (q, finally_label, mod);
1252 switch_id = q->index;
1253 }
1254
1255 case_index = j + q->index;
1256 if (!TREE_VEC_ELT (case_label_vec, case_index))
1257 TREE_VEC_ELT (case_label_vec, case_index)
1258 = build (CASE_LABEL_EXPR, void_type_node,
1259 build_int_cst (NULL_TREE, switch_id), NULL,
1260 /* We store the cont_stmt in the
1261 CASE_LABEL, so that we can recover it
1262 in the loop below. We don't create
1263 the new label while walking the
1264 goto_queue because pointers don't
1265 offer a stable order. */
1266 q->cont_stmt);
1267 }
1268 for (j = last_case_index; j < last_case_index + nlabels; j++)
1269 {
1270 tree label;
1271 tree cont_stmt;
1272
1273 last_case = TREE_VEC_ELT (case_label_vec, j);
1274
1275 gcc_assert (last_case);
1276
1277 cont_stmt = CASE_LABEL (last_case);
1278
1279 label = create_artificial_label ();
1280 CASE_LABEL (last_case) = label;
1281
1282 x = build (LABEL_EXPR, void_type_node, label);
1283 append_to_statement_list (x, &switch_body);
1284 append_to_statement_list (cont_stmt, &switch_body);
1285 maybe_record_in_goto_queue (state, cont_stmt);
1286 }
1287 replace_goto_queue (tf);
1288
1289 /* Make sure that the last case is the default label, as one is required.
1290 Then sort the labels, which is also required in GIMPLE. */
1291 CASE_LOW (last_case) = NULL;
1292 sort_case_labels (case_label_vec);
1293
 1294   /* Link switch_stmt only after running replace_goto_queue, so that we
 1295      do not process the same goto statements twice.  */
1296 append_to_statement_list (switch_stmt, tf->top_p);
1297 append_to_statement_list (switch_body, tf->top_p);
1298 }
1299
1300 /* Decide whether or not we are going to duplicate the finally block.
1301 There are several considerations.
1302
1303 First, if this is Java, then the finally block contains code
1304 written by the user. It has line numbers associated with it,
1305 so duplicating the block means it's difficult to set a breakpoint.
1306 Since controlling code generation via -g is verboten, we simply
1307 never duplicate code without optimization.
1308
1309 Second, we'd like to prevent egregious code growth. One way to
1310 do this is to estimate the size of the finally block, multiply
1311 that by the number of copies we'd need to make, and compare against
1312 the estimate of the size of the switch machinery we'd have to add. */
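
/* A worked example of the heuristic below (numbers purely illustrative):
   with ndests == 3 and estimate_num_insns (finally) == 5, copying costs
   about (5 + 1) * 3 == 18 while the switch costs 10 + 2 * 3 == 16, so
   under -Os we keep the switch; at -O2 the "f_estimate < 100" test still
   lets the copies win.  */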
1313
1314 static bool
1315 decide_copy_try_finally (int ndests, tree finally)
1316 {
1317 int f_estimate, sw_estimate;
1318
1319 if (!optimize)
1320 return false;
1321
1322 /* Finally estimate N times, plus N gotos. */
1323 f_estimate = estimate_num_insns (finally);
1324 f_estimate = (f_estimate + 1) * ndests;
1325
1326 /* Switch statement (cost 10), N variable assignments, N gotos. */
1327 sw_estimate = 10 + 2 * ndests;
1328
1329 /* Optimize for size clearly wants our best guess. */
1330 if (optimize_size)
1331 return f_estimate < sw_estimate;
1332
1333 /* ??? These numbers are completely made up so far. */
1334 if (optimize > 1)
1335 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1336 else
1337 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1338 }
1339
 1340 /* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
1341 to a sequence of labels and blocks, plus the exception region trees
1342 that record all the magic. This is complicated by the need to
1343 arrange for the FINALLY block to be executed on all exits. */
1344
1345 static void
1346 lower_try_finally (struct leh_state *state, tree *tp)
1347 {
1348 struct leh_tf_state this_tf;
1349 struct leh_state this_state;
1350 int ndests;
1351
1352 /* Process the try block. */
1353
1354 memset (&this_tf, 0, sizeof (this_tf));
1355 this_tf.try_finally_expr = *tp;
1356 this_tf.top_p = tp;
1357 this_tf.outer = state;
1358 if (using_eh_for_cleanups_p)
1359 this_tf.region
1360 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1361 else
1362 this_tf.region = NULL;
1363
1364 this_state.cur_region = this_tf.region;
1365 this_state.prev_try = state->prev_try;
1366 this_state.tf = &this_tf;
1367
1368 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1369
1370 /* Determine if the try block is escaped through the bottom. */
1371 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1372
1373 /* Determine if any exceptions are possible within the try block. */
1374 if (using_eh_for_cleanups_p)
1375 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1376 if (this_tf.may_throw)
1377 {
1378 this_tf.eh_label = create_artificial_label ();
1379 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1380 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1381 }
1382
1383 /* Sort the goto queue for efficient searching later. */
1384 if (this_tf.goto_queue_active > 1)
1385 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1386 sizeof (struct goto_queue_node), goto_queue_cmp);
1387
1388 /* Determine how many edges (still) reach the finally block. Or rather,
1389 how many destinations are reached by the finally block. Use this to
1390 determine how we process the finally block itself. */
1391
1392 if (this_tf.dest_array)
1393 ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
1394 else
1395 ndests = 0;
1396 ndests += this_tf.may_fallthru;
1397 ndests += this_tf.may_return;
1398 ndests += this_tf.may_throw;
1399
1400 /* If the FINALLY block is not reachable, dike it out. */
1401 if (ndests == 0)
1402 *tp = TREE_OPERAND (*tp, 0);
1403
1404 /* If the finally block doesn't fall through, then any destination
1405 we might try to impose there isn't reached either. There may be
1406 some minor amount of cleanup and redirection still needed. */
1407 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1408 lower_try_finally_nofallthru (state, &this_tf);
1409
1410 /* We can easily special-case redirection to a single destination. */
1411 else if (ndests == 1)
1412 lower_try_finally_onedest (state, &this_tf);
1413
1414 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1415 lower_try_finally_copy (state, &this_tf);
1416 else
1417 lower_try_finally_switch (state, &this_tf);
1418
1419 /* If someone requested we add a label at the end of the transformed
1420 block, do so. */
1421 if (this_tf.fallthru_label)
1422 {
1423 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1424 append_to_statement_list (x, tp);
1425 }
1426
1427 if (this_tf.goto_queue)
1428 free (this_tf.goto_queue);
1429 }
1430
1431 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1432 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1433 exception region trees that record all the magic. */
1434
1435 static void
1436 lower_catch (struct leh_state *state, tree *tp)
1437 {
1438 struct eh_region *try_region;
1439 struct leh_state this_state;
1440 tree_stmt_iterator i;
1441 tree out_label;
1442
1443 try_region = gen_eh_region_try (state->cur_region);
1444 this_state.cur_region = try_region;
1445 this_state.prev_try = try_region;
1446 this_state.tf = state->tf;
1447
1448 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1449
1450 if (!get_eh_region_may_contain_throw (try_region))
1451 {
1452 *tp = TREE_OPERAND (*tp, 0);
1453 return;
1454 }
1455
1456 out_label = NULL;
1457 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1458 {
1459 struct eh_region *catch_region;
1460 tree catch, x, eh_label;
1461
1462 catch = tsi_stmt (i);
1463 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1464
1465 this_state.cur_region = catch_region;
1466 this_state.prev_try = state->prev_try;
1467 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1468
1469 eh_label = create_artificial_label ();
1470 set_eh_region_tree_label (catch_region, eh_label);
1471
1472 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1473 tsi_link_before (&i, x, TSI_SAME_STMT);
1474
1475 if (block_may_fallthru (CATCH_BODY (catch)))
1476 {
1477 if (!out_label)
1478 out_label = create_artificial_label ();
1479
1480 x = build1 (GOTO_EXPR, void_type_node, out_label);
1481 append_to_statement_list (x, &CATCH_BODY (catch));
1482 }
1483
1484 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1485 tsi_delink (&i);
1486 }
1487
1488 frob_into_branch_around (tp, NULL, out_label);
1489 }
1490
 1491 /* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
1492 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1493 region trees that record all the magic. */
1494
1495 static void
1496 lower_eh_filter (struct leh_state *state, tree *tp)
1497 {
1498 struct leh_state this_state;
1499 struct eh_region *this_region;
1500 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1501 tree eh_label;
1502
1503 if (EH_FILTER_MUST_NOT_THROW (inner))
1504 this_region = gen_eh_region_must_not_throw (state->cur_region);
1505 else
1506 this_region = gen_eh_region_allowed (state->cur_region,
1507 EH_FILTER_TYPES (inner));
1508 this_state = *state;
1509 this_state.cur_region = this_region;
1510
1511 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1512
1513 if (!get_eh_region_may_contain_throw (this_region))
1514 {
1515 *tp = TREE_OPERAND (*tp, 0);
1516 return;
1517 }
1518
1519 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1520 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1521
1522 eh_label = create_artificial_label ();
1523 set_eh_region_tree_label (this_region, eh_label);
1524
1525 frob_into_branch_around (tp, eh_label, NULL);
1526 }
1527
1528 /* Implement a cleanup expression. This is similar to try-finally,
1529 except that we only execute the cleanup block for exception edges. */
1530
1531 static void
1532 lower_cleanup (struct leh_state *state, tree *tp)
1533 {
1534 struct leh_state this_state;
1535 struct eh_region *this_region;
1536 struct leh_tf_state fake_tf;
1537
1538 /* If not using eh, then exception-only cleanups are no-ops. */
1539 if (!flag_exceptions)
1540 {
1541 *tp = TREE_OPERAND (*tp, 0);
1542 lower_eh_constructs_1 (state, tp);
1543 return;
1544 }
1545
1546 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1547 this_state = *state;
1548 this_state.cur_region = this_region;
1549
1550 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1551
1552 if (!get_eh_region_may_contain_throw (this_region))
1553 {
1554 *tp = TREE_OPERAND (*tp, 0);
1555 return;
1556 }
1557
1558 /* Build enough of a try-finally state so that we can reuse
1559 honor_protect_cleanup_actions. */
1560 memset (&fake_tf, 0, sizeof (fake_tf));
1561 fake_tf.top_p = tp;
1562 fake_tf.outer = state;
1563 fake_tf.region = this_region;
1564 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1565 fake_tf.may_throw = true;
1566
1567 fake_tf.eh_label = create_artificial_label ();
1568 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1569
1570 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1571
1572 if (fake_tf.may_throw)
1573 {
1574 /* In this case honor_protect_cleanup_actions had nothing to do,
1575 and we should process this normally. */
1576 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1577 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1578 }
1579 else
1580 {
1581 /* In this case honor_protect_cleanup_actions did nearly all of
1582 the work. All we have left is to append the fallthru_label. */
1583
1584 *tp = TREE_OPERAND (*tp, 0);
1585 if (fake_tf.fallthru_label)
1586 {
1587 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1588 append_to_statement_list (x, tp);
1589 }
1590 }
1591 }
1592
1593 /* Main loop for lowering eh constructs. */
1594
1595 static void
1596 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1597 {
1598 tree_stmt_iterator i;
1599 tree t = *tp;
1600
1601 switch (TREE_CODE (t))
1602 {
1603 case COND_EXPR:
1604 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1605 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1606 break;
1607
1608 case CALL_EXPR:
1609 /* Look for things that can throw exceptions, and record them. */
1610 if (state->cur_region && tree_could_throw_p (t))
1611 {
1612 record_stmt_eh_region (state->cur_region, t);
1613 note_eh_region_may_contain_throw (state->cur_region);
1614 }
1615 break;
1616
1617 case MODIFY_EXPR:
1618 /* Look for things that can throw exceptions, and record them. */
1619 if (state->cur_region && tree_could_throw_p (t))
1620 {
1621 tree op;
1622
1623 record_stmt_eh_region (state->cur_region, t);
1624 note_eh_region_may_contain_throw (state->cur_region);
1625
1626 /* ??? For the benefit of calls.c, converting all this to rtl,
1627 we need to record the call expression, not just the outer
1628 modify statement. */
1629 op = get_call_expr_in (t);
1630 if (op)
1631 record_stmt_eh_region (state->cur_region, op);
1632 }
1633 break;
1634
1635 case GOTO_EXPR:
1636 case RETURN_EXPR:
1637 maybe_record_in_goto_queue (state, t);
1638 break;
1639 case SWITCH_EXPR:
1640 verify_norecord_switch_expr (state, t);
1641 break;
1642
1643 case TRY_FINALLY_EXPR:
1644 lower_try_finally (state, tp);
1645 break;
1646
1647 case TRY_CATCH_EXPR:
1648 i = tsi_start (TREE_OPERAND (t, 1));
1649 switch (TREE_CODE (tsi_stmt (i)))
1650 {
1651 case CATCH_EXPR:
1652 lower_catch (state, tp);
1653 break;
1654 case EH_FILTER_EXPR:
1655 lower_eh_filter (state, tp);
1656 break;
1657 default:
1658 lower_cleanup (state, tp);
1659 break;
1660 }
1661 break;
1662
1663 case STATEMENT_LIST:
1664 for (i = tsi_start (t); !tsi_end_p (i); )
1665 {
1666 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1667 t = tsi_stmt (i);
1668 if (TREE_CODE (t) == STATEMENT_LIST)
1669 {
1670 tsi_link_before (&i, t, TSI_SAME_STMT);
1671 tsi_delink (&i);
1672 }
1673 else
1674 tsi_next (&i);
1675 }
1676 break;
1677
1678 default:
1679 /* A type, a decl, or some kind of statement that we're not
1680 interested in. Don't walk them. */
1681 break;
1682 }
1683 }
1684
1685 static void
1686 lower_eh_constructs (void)
1687 {
1688 struct leh_state null_state;
1689 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1690
1691 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1692 set_eh_throw_stmt_table (cfun, htab_create_ggc (31, struct_ptr_hash,
1693 struct_ptr_eq,
1694 ggc_free));
1695
1696 collect_finally_tree (*tp, NULL);
1697
1698 memset (&null_state, 0, sizeof (null_state));
1699 lower_eh_constructs_1 (&null_state, tp);
1700
1701 htab_delete (finally_tree);
1702
1703 collect_eh_region_array ();
1704 }
1705
1706 struct tree_opt_pass pass_lower_eh =
1707 {
1708 "eh", /* name */
1709 NULL, /* gate */
1710 lower_eh_constructs, /* execute */
1711 NULL, /* sub */
1712 NULL, /* next */
1713 0, /* static_pass_number */
1714 TV_TREE_EH, /* tv_id */
1715 PROP_gimple_lcf, /* properties_required */
1716 PROP_gimple_leh, /* properties_provided */
1717 PROP_gimple_lcf, /* properties_destroyed */
1718 0, /* todo_flags_start */
1719 TODO_dump_func, /* todo_flags_finish */
1720 0 /* letter */
1721 };
1722
1723 \f
1724 /* Construct EH edges for STMT. */
1725
1726 static void
1727 make_eh_edge (struct eh_region *region, void *data)
1728 {
1729 tree stmt, lab;
1730 basic_block src, dst;
1731
1732 stmt = data;
1733 lab = get_eh_region_tree_label (region);
1734
1735 src = bb_for_stmt (stmt);
1736 dst = label_to_block (lab);
1737
1738 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1739 }
1740
1741 void
1742 make_eh_edges (tree stmt)
1743 {
1744 int region_nr;
1745 bool is_resx;
1746
1747 if (TREE_CODE (stmt) == RESX_EXPR)
1748 {
1749 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1750 is_resx = true;
1751 }
1752 else
1753 {
1754 region_nr = lookup_stmt_eh_region (stmt);
1755 if (region_nr < 0)
1756 return;
1757 is_resx = false;
1758 }
1759
1760 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1761 }
1762
1763 static bool mark_eh_edge_found_error;
1764
 1765 /* Mark the edge that make_eh_edge would create for the given region by
 1766    setting its aux field; report an error if something goes wrong.  */
1767 static void
1768 mark_eh_edge (struct eh_region *region, void *data)
1769 {
1770 tree stmt, lab;
1771 basic_block src, dst;
1772 edge e;
1773
1774 stmt = data;
1775 lab = get_eh_region_tree_label (region);
1776
1777 src = bb_for_stmt (stmt);
1778 dst = label_to_block (lab);
1779
1780 e = find_edge (src, dst);
1781 if (!e)
1782 {
 1783       error ("EH edge %i->%i is missing.", src->index, dst->index);
1784 mark_eh_edge_found_error = true;
1785 }
1786 else if (!(e->flags & EDGE_EH))
1787 {
 1788       error ("EH edge %i->%i is missing the EH flag.", src->index, dst->index);
1789 mark_eh_edge_found_error = true;
1790 }
1791 else if (e->aux)
1792 {
 1793       /* ??? This might not be a mistake.  */
1794 error ("EH edge %i->%i has duplicated regions.", src->index, dst->index);
1795 mark_eh_edge_found_error = true;
1796 }
1797 else
1798 e->aux = (void *)1;
1799 }
1800
 1801 /* Verify that the BB containing STMT as its last statement has precisely
 1802    the edges that make_eh_edges would create.  */
1803 bool
1804 verify_eh_edges (tree stmt)
1805 {
1806 int region_nr;
1807 bool is_resx;
1808 basic_block bb = bb_for_stmt (stmt);
1809 edge_iterator ei;
1810 edge e;
1811
1812 FOR_EACH_EDGE (e, ei, bb->succs)
1813 gcc_assert (!e->aux);
1814 mark_eh_edge_found_error = false;
1815 if (TREE_CODE (stmt) == RESX_EXPR)
1816 {
1817 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1818 is_resx = true;
1819 }
1820 else
1821 {
1822 region_nr = lookup_stmt_eh_region (stmt);
1823 if (region_nr < 0)
1824 {
1825 FOR_EACH_EDGE (e, ei, bb->succs)
1826 if (e->flags & EDGE_EH)
1827 {
 1828             error ("BB %i cannot throw but has EH edges", bb->index);
1829 return true;
1830 }
1831 return false;
1832 }
1833 if (!tree_could_throw_p (stmt))
1834 {
1835 error ("BB %i last statement has incorrectly set region", bb->index);
1836 return true;
1837 }
1838 is_resx = false;
1839 }
1840
1841 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1842 FOR_EACH_EDGE (e, ei, bb->succs)
1843 {
1844 if ((e->flags & EDGE_EH) && !e->aux)
1845 {
 1846           error ("Unnecessary EH edge %i->%i", bb->index, e->dest->index);
1847 mark_eh_edge_found_error = true;
1848 return true;
1849 }
1850 e->aux = NULL;
1851 }
1852 return mark_eh_edge_found_error;
1853 }
1854
1855 \f
1856 /* Return true if the expr can trap, as in dereferencing an invalid pointer
 1857    location or floating point arithmetic.  Cf. the RTL version, may_trap_p.
1858 This routine expects only GIMPLE lhs or rhs input. */
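
/* A few examples (illustrative): *p can trap unless TREE_THIS_NOTRAP is
   set on the INDIRECT_REF; a[i] can trap if its base can trap or the index
   may be out of bounds; integer x / y can trap unless y is a known nonzero
   constant (or always under -ftrapv for signed types), while floating-point
   division traps only under -ftrapping-math or when signaling NaNs are
   honored.  */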
1859
1860 bool
1861 tree_could_trap_p (tree expr)
1862 {
1863 enum tree_code code = TREE_CODE (expr);
1864 bool honor_nans = false;
1865 bool honor_snans = false;
1866 bool fp_operation = false;
1867 bool honor_trapv = false;
1868 tree t, base;
1869
1870 if (TREE_CODE_CLASS (code) == tcc_comparison
1871 || TREE_CODE_CLASS (code) == tcc_unary
1872 || TREE_CODE_CLASS (code) == tcc_binary)
1873 {
1874 t = TREE_TYPE (expr);
1875 fp_operation = FLOAT_TYPE_P (t);
1876 if (fp_operation)
1877 {
1878 honor_nans = flag_trapping_math && !flag_finite_math_only;
1879 honor_snans = flag_signaling_nans != 0;
1880 }
1881 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
1882 honor_trapv = true;
1883 }
1884
1885 restart:
1886 switch (code)
1887 {
1888 case COMPONENT_REF:
1889 case REALPART_EXPR:
1890 case IMAGPART_EXPR:
1891 case BIT_FIELD_REF:
1892 case WITH_SIZE_EXPR:
1893 expr = TREE_OPERAND (expr, 0);
1894 code = TREE_CODE (expr);
1895 goto restart;
1896
1897 case ARRAY_RANGE_REF:
1898 /* Let us be conservative here for now. We might be checking bounds of
1899 the access similarly to the case below. */
1900 if (!TREE_THIS_NOTRAP (expr))
1901 return true;
1902
1903 base = TREE_OPERAND (expr, 0);
1904 return tree_could_trap_p (base);
1905
1906 case ARRAY_REF:
1907 base = TREE_OPERAND (expr, 0);
1908 if (tree_could_trap_p (base))
1909 return true;
1910
1911 if (TREE_THIS_NOTRAP (expr))
1912 return false;
1913
1914 return !in_array_bounds_p (expr);
1915
1916 case INDIRECT_REF:
1917 case ALIGN_INDIRECT_REF:
1918 case MISALIGNED_INDIRECT_REF:
1919 return !TREE_THIS_NOTRAP (expr);
1920
1921 case ASM_EXPR:
1922 return TREE_THIS_VOLATILE (expr);
1923
1924 case TRUNC_DIV_EXPR:
1925 case CEIL_DIV_EXPR:
1926 case FLOOR_DIV_EXPR:
1927 case ROUND_DIV_EXPR:
1928 case EXACT_DIV_EXPR:
1929 case CEIL_MOD_EXPR:
1930 case FLOOR_MOD_EXPR:
1931 case ROUND_MOD_EXPR:
1932 case TRUNC_MOD_EXPR:
1933 case RDIV_EXPR:
1934 if (honor_snans || honor_trapv)
1935 return true;
1936 if (fp_operation)
1937 return flag_trapping_math;
1938 t = TREE_OPERAND (expr, 1);
1939 if (!TREE_CONSTANT (t) || integer_zerop (t))
1940 return true;
1941 return false;
1942
1943 case LT_EXPR:
1944 case LE_EXPR:
1945 case GT_EXPR:
1946 case GE_EXPR:
1947 case LTGT_EXPR:
1948 /* Some floating point comparisons may trap. */
1949 return honor_nans;
1950
1951 case EQ_EXPR:
1952 case NE_EXPR:
1953 case UNORDERED_EXPR:
1954 case ORDERED_EXPR:
1955 case UNLT_EXPR:
1956 case UNLE_EXPR:
1957 case UNGT_EXPR:
1958 case UNGE_EXPR:
1959 case UNEQ_EXPR:
1960 return honor_snans;
1961
1962 case CONVERT_EXPR:
1963 case FIX_TRUNC_EXPR:
1964 case FIX_CEIL_EXPR:
1965 case FIX_FLOOR_EXPR:
1966 case FIX_ROUND_EXPR:
1967 /* Conversion of floating point might trap. */
1968 return honor_nans;
1969
1970 case NEGATE_EXPR:
1971 case ABS_EXPR:
1972 case CONJ_EXPR:
1973 /* These operations don't trap with floating point. */
1974 if (honor_trapv)
1975 return true;
1976 return false;
1977
1978 case PLUS_EXPR:
1979 case MINUS_EXPR:
1980 case MULT_EXPR:
1981 /* Any floating arithmetic may trap. */
1982 if (fp_operation && flag_trapping_math)
1983 return true;
1984 if (honor_trapv)
1985 return true;
1986 return false;
1987
1988 case CALL_EXPR:
1989 t = get_callee_fndecl (expr);
1990 /* Assume that calls to weak functions may trap. */
1991 if (!t || !DECL_P (t) || DECL_WEAK (t))
1992 return true;
1993 return false;
1994
1995 default:
1996 /* Any floating arithmetic may trap. */
1997 if (fp_operation && flag_trapping_math)
1998 return true;
1999 return false;
2000 }
2001 }
2002
2003 bool
2004 tree_could_throw_p (tree t)
2005 {
2006 if (!flag_exceptions)
2007 return false;
2008 if (TREE_CODE (t) == MODIFY_EXPR)
2009 {
2010 if (flag_non_call_exceptions
2011 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2012 return true;
2013 t = TREE_OPERAND (t, 1);
2014 }
2015
2016 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2017 t = TREE_OPERAND (t, 0);
2018 if (TREE_CODE (t) == CALL_EXPR)
2019 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2020 if (flag_non_call_exceptions)
2021 return tree_could_trap_p (t);
2022 return false;
2023 }
2024
2025 bool
2026 tree_can_throw_internal (tree stmt)
2027 {
2028 int region_nr = lookup_stmt_eh_region (stmt);
2029 if (region_nr < 0)
2030 return false;
2031 return can_throw_internal_1 (region_nr);
2032 }
2033
2034 bool
2035 tree_can_throw_external (tree stmt)
2036 {
2037 int region_nr = lookup_stmt_eh_region (stmt);
2038 if (region_nr < 0)
2039 return false;
2040 return can_throw_external_1 (region_nr);
2041 }
2042
2043 bool
2044 maybe_clean_eh_stmt (tree stmt)
2045 {
2046 if (!tree_could_throw_p (stmt))
2047 if (remove_stmt_from_eh_region (stmt))
2048 return true;
2049 return false;
2050 }
2051