[gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39
40 \f
41 /* Nonzero if we are using EH to handle cleanups. */
42 static int using_eh_for_cleanups_p = 0;
43
44 void
45 using_eh_for_cleanups (void)
46 {
47 using_eh_for_cleanups_p = 1;
48 }
49 \f
50 /* Misc functions used in this file. */
51
52 /* Compare and hash for any structure which begins with a canonical
53 pointer. Assumes all pointers are interchangeable, which is sort
54 of already assumed by gcc elsewhere IIRC. */
55
56 static int
57 struct_ptr_eq (const void *a, const void *b)
58 {
59 const void * const * x = a;
60 const void * const * y = b;
61 return *x == *y;
62 }
63
64 static hashval_t
65 struct_ptr_hash (const void *a)
66 {
67 const void * const * x = a;
68 return (size_t)*x >> 4;
69 }
70
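/* Illustrative usage sketch (not part of the original file): these two
   helpers let any structure whose first field is a pointer serve as a
   hash table entry keyed by that pointer.  For instance, the finally_tree
   table below is created as

	htab_t table = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

   and its entries, of type

	struct finally_tree_node { tree child, parent; };

   hash and compare on CHILD alone, so htab_find can be handed a
   stack-allocated node with only the key field filled in.  */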
71 \f
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
75
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
79
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
83
84 static void
85 record_stmt_eh_region (struct eh_region *region, tree t)
86 {
87 struct throw_stmt_node *n;
88 void **slot;
89
90 if (!region)
91 return;
92
93 n = ggc_alloc (sizeof (*n));
94 n->stmt = t;
95 n->region_nr = get_eh_region_number (region);
96
97 slot = htab_find_slot (get_eh_throw_stmt_table (cfun), n, INSERT);
98 gcc_assert (!*slot);
99 *slot = n;
100 }
101
102 void
103 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
104 {
105 struct throw_stmt_node *n;
106 void **slot;
107
108 gcc_assert (num >= 0);
109 gcc_assert (TREE_CODE (t) != RESX_EXPR);
110
111 n = ggc_alloc (sizeof (*n));
112 n->stmt = t;
113 n->region_nr = num;
114
115 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
116 gcc_assert (!*slot);
117 *slot = n;
118 }
119
120 void
121 add_stmt_to_eh_region (tree t, int num)
122 {
123 add_stmt_to_eh_region_fn (cfun, t, num);
124 }
125
126 bool
127 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
128 {
129 struct throw_stmt_node dummy;
130 void **slot;
131
132 if (!get_eh_throw_stmt_table (ifun))
133 return false;
134
135 dummy.stmt = t;
136 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
137 NO_INSERT);
138 if (slot)
139 {
140 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
141 return true;
142 }
143 else
144 return false;
145 }
146
147 bool
148 remove_stmt_from_eh_region (tree t)
149 {
150 return remove_stmt_from_eh_region_fn (cfun, t);
151 }
152
153 int
154 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
155 {
156 struct throw_stmt_node *p, n;
157
158 if (!get_eh_throw_stmt_table (ifun))
159 return -2;
160
161 n.stmt = t;
162 p = htab_find (get_eh_throw_stmt_table (ifun), &n);
163
164 return (p ? p->region_nr : -1);
165 }
166
167 int
168 lookup_stmt_eh_region (tree t)
169 {
170 /* We can get called from initialized data when -fnon-call-exceptions
171 is on; prevent crash. */
172 if (!cfun)
173 return -1;
174 return lookup_stmt_eh_region_fn (cfun, t);
175 }
176
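/* A usage sketch of the mapping above (illustrative only): a statement
   that may throw is registered once during lowering and consulted by
   later passes, e.g.

	add_stmt_to_eh_region (stmt, region_nr);
	...
	int rn = lookup_stmt_eh_region (stmt);

   where RN is -2 if the function has no throw-statement table at all,
   -1 if the statement was never recorded, and otherwise the EH region
   number used when building edges and RESX expressions.  Once a pass
   proves the statement can no longer throw, remove_stmt_from_eh_region
   drops the entry (cf. maybe_clean_eh_stmt at the end of this file).  */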
177 \f
178 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
179 nodes and LABEL_DECL nodes. We will use this during the second phase to
180 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
181
182 struct finally_tree_node
183 {
184 tree child, parent;
185 };
186
187 /* Note that this table is *not* marked GTY. It is short-lived. */
188 static htab_t finally_tree;
189
190 static void
191 record_in_finally_tree (tree child, tree parent)
192 {
193 struct finally_tree_node *n;
194 void **slot;
195
196 n = xmalloc (sizeof (*n));
197 n->child = child;
198 n->parent = parent;
199
200 slot = htab_find_slot (finally_tree, n, INSERT);
201 gcc_assert (!*slot);
202 *slot = n;
203 }
204
205 static void
206 collect_finally_tree (tree t, tree region)
207 {
208 tailrecurse:
209 switch (TREE_CODE (t))
210 {
211 case LABEL_EXPR:
212 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
213 break;
214
215 case TRY_FINALLY_EXPR:
216 record_in_finally_tree (t, region);
217 collect_finally_tree (TREE_OPERAND (t, 0), t);
218 t = TREE_OPERAND (t, 1);
219 goto tailrecurse;
220
221 case TRY_CATCH_EXPR:
222 collect_finally_tree (TREE_OPERAND (t, 0), region);
223 t = TREE_OPERAND (t, 1);
224 goto tailrecurse;
225
226 case CATCH_EXPR:
227 t = CATCH_BODY (t);
228 goto tailrecurse;
229
230 case EH_FILTER_EXPR:
231 t = EH_FILTER_FAILURE (t);
232 goto tailrecurse;
233
234 case STATEMENT_LIST:
235 {
236 tree_stmt_iterator i;
237 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
238 collect_finally_tree (tsi_stmt (i), region);
239 }
240 break;
241
242 default:
243 /* A type, a decl, or some kind of statement that we're not
244 interested in. Don't walk them. */
245 break;
246 }
247 }
248
249 /* Use the finally tree to determine if a jump from START to TARGET
250 would leave the try_finally node that START lives in. */
251
252 static bool
253 outside_finally_tree (tree start, tree target)
254 {
255 struct finally_tree_node n, *p;
256
257 do
258 {
259 n.child = start;
260 p = htab_find (finally_tree, &n);
261 if (!p)
262 return true;
263 start = p->parent;
264 }
265 while (start != target);
266
267 return false;
268 }
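/* A worked example (illustrative, not compiler output): given

	try
	  {
	    a: ...;
	    goto a;		// target recorded inside this try_finally
	    goto b;		// target recorded outside it
	  }
	finally
	  {
	    cleanup ();
	  }
	b: ...;

   collect_finally_tree records label A with the TRY_FINALLY_EXPR as its
   parent and label B against the outer region (NULL at top level).  So
   outside_finally_tree (A, try) reaches the try node at once and returns
   false, while outside_finally_tree (B, try) runs off the top of the
   tree and returns true; only the second goto is queued for redirection
   through the finally block.  */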
269 \f
270 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
271 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
272 The eh region creation is straightforward, but frobbing all the gotos
273 and such into shape isn't. */
274
275 /* State of the world while lowering. */
276
277 struct leh_state
278 {
279 /* What's "current" while constructing the eh region tree. These
280 correspond to variables of the same name in cfun->eh, which we
281 don't have easy access to. */
282 struct eh_region *cur_region;
283 struct eh_region *prev_try;
284
285 /* Processing of TRY_FINALLY requires a bit more state. This is
286 split out into a separate structure so that we don't have to
287 copy so much when processing other nodes. */
288 struct leh_tf_state *tf;
289 };
290
291 struct leh_tf_state
292 {
293 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
294 is the original TRY_FINALLY_EXPR. We need to retain this so that
295 outside_finally_tree can reliably reference the tree used in the
296 collect_finally_tree data structures. */
297 tree try_finally_expr;
298 tree *top_p;
299
300 /* The state outside this try_finally node. */
301 struct leh_state *outer;
302
303 /* The exception region created for it. */
304 struct eh_region *region;
305
306 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
307 that are seen to escape this TRY_FINALLY_EXPR node. */
308 struct goto_queue_node {
309 tree stmt;
310 tree repl_stmt;
311 tree cont_stmt;
312 int index;
313 } *goto_queue;
314 size_t goto_queue_size;
315 size_t goto_queue_active;
316
317 /* The set of unique labels seen as entries in the goto queue. */
318 varray_type dest_array;
319
320 /* A label to be added at the end of the completed transformed
321 sequence. It will be set if may_fallthru was true *at one time*,
322 though subsequent transformations may have cleared that flag. */
323 tree fallthru_label;
324
325 /* A label that has been registered with except.c to be the
326 landing pad for this try block. */
327 tree eh_label;
328
329 /* True if it is possible to fall out the bottom of the try block.
330 Cleared if the fallthru is converted to a goto. */
331 bool may_fallthru;
332
333 /* True if any entry in goto_queue is a RETURN_EXPR. */
334 bool may_return;
335
336 /* True if the finally block can receive an exception edge.
337 Cleared if the exception case is handled by code duplication. */
338 bool may_throw;
339 };
340
341 static void lower_eh_filter (struct leh_state *, tree *);
342 static void lower_eh_constructs_1 (struct leh_state *, tree *);
343
344 /* Comparison function for qsort/bsearch. We're interested in
345 searching goto queue elements for source statements. */
346
347 static int
348 goto_queue_cmp (const void *x, const void *y)
349 {
350 tree a = ((const struct goto_queue_node *)x)->stmt;
351 tree b = ((const struct goto_queue_node *)y)->stmt;
352 return (a == b ? 0 : a < b ? -1 : 1);
353 }
354
355 /* Search for STMT in the goto queue. Return the replacement,
356 or null if the statement isn't in the queue. */
357
358 static tree
359 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
360 {
361 struct goto_queue_node tmp, *ret;
362 tmp.stmt = stmt;
363 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
364 sizeof (struct goto_queue_node), goto_queue_cmp);
365 return (ret ? ret->repl_stmt : NULL);
366 }
367
368 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
369 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
370 then we can just splat it in, otherwise we add the new stmts immediately
371 after the COND_EXPR and redirect. */
372
373 static void
374 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
375 tree_stmt_iterator *tsi)
376 {
377 tree new, one, label;
378
379 new = find_goto_replacement (tf, *tp);
380 if (!new)
381 return;
382
383 one = expr_only (new);
384 if (one && TREE_CODE (one) == GOTO_EXPR)
385 {
386 *tp = one;
387 return;
388 }
389
390 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
391 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
392
393 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
394 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
395 }
396
397 /* The real work of replace_goto_queue. Returns with TSI updated to
398 point to the next statement. */
399
400 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
401
402 static void
403 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
404 {
405 switch (TREE_CODE (t))
406 {
407 case GOTO_EXPR:
408 case RETURN_EXPR:
409 t = find_goto_replacement (tf, t);
410 if (t)
411 {
412 tsi_link_before (tsi, t, TSI_SAME_STMT);
413 tsi_delink (tsi);
414 return;
415 }
416 break;
417
418 case COND_EXPR:
419 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
420 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
421 break;
422
423 case TRY_FINALLY_EXPR:
424 case TRY_CATCH_EXPR:
425 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
426 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
427 break;
428 case CATCH_EXPR:
429 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
430 break;
431 case EH_FILTER_EXPR:
432 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
433 break;
434
435 case STATEMENT_LIST:
436 gcc_unreachable ();
437
438 default:
439 /* These won't have gotos in them. */
440 break;
441 }
442
443 tsi_next (tsi);
444 }
445
446 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
447
448 static void
449 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
450 {
451 tree_stmt_iterator i = tsi_start (t);
452 while (!tsi_end_p (i))
453 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
454 }
455
456 /* Replace all goto queue members. */
457
458 static void
459 replace_goto_queue (struct leh_tf_state *tf)
460 {
461 if (tf->goto_queue_active == 0)
462 return;
463 replace_goto_queue_stmt_list (*tf->top_p, tf);
464 }
465
466 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
467 node, and if so record that fact in the goto queue associated with that
468 try_finally node. */
469
470 static void
471 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
472 {
473 struct leh_tf_state *tf = state->tf;
474 struct goto_queue_node *q;
475 size_t active, size;
476 int index;
477
478 if (!tf)
479 return;
480
481 switch (TREE_CODE (stmt))
482 {
483 case GOTO_EXPR:
484 {
485 tree lab = GOTO_DESTINATION (stmt);
486
487 /* Computed and non-local gotos do not get processed. Given
488 their nature we can neither tell whether we've escaped the
489 finally block nor redirect them if we knew. */
490 if (TREE_CODE (lab) != LABEL_DECL)
491 return;
492
493 /* No need to record gotos that don't leave the try block. */
494 if (! outside_finally_tree (lab, tf->try_finally_expr))
495 return;
496
497 if (! tf->dest_array)
498 {
499 VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
500 VARRAY_PUSH_TREE (tf->dest_array, lab);
501 index = 0;
502 }
503 else
504 {
505 int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
506 for (index = 0; index < n; ++index)
507 if (VARRAY_TREE (tf->dest_array, index) == lab)
508 break;
509 if (index == n)
510 VARRAY_PUSH_TREE (tf->dest_array, lab);
511 }
512 }
513 break;
514
515 case RETURN_EXPR:
516 tf->may_return = true;
517 index = -1;
518 break;
519
520 default:
521 gcc_unreachable ();
522 }
523
524 active = tf->goto_queue_active;
525 size = tf->goto_queue_size;
526 if (active >= size)
527 {
528 size = (size ? size * 2 : 32);
529 tf->goto_queue_size = size;
530 tf->goto_queue
531 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
532 }
533
534 q = &tf->goto_queue[active];
535 tf->goto_queue_active = active + 1;
536
537 memset (q, 0, sizeof (*q));
538 q->stmt = stmt;
539 q->index = index;
540 }
541
542 #ifdef ENABLE_CHECKING
543 /* We do not process SWITCH_EXPRs for now. As long as the original source
544 was in fact structured, and we've not yet done jump threading, none
545 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
546
547 static void
548 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
549 {
550 struct leh_tf_state *tf = state->tf;
551 size_t i, n;
552 tree vec;
553
554 if (!tf)
555 return;
556
557 vec = SWITCH_LABELS (switch_expr);
558 n = TREE_VEC_LENGTH (vec);
559
560 for (i = 0; i < n; ++i)
561 {
562 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
563 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
564 }
565 }
566 #else
567 #define verify_norecord_switch_expr(state, switch_expr)
568 #endif
569
570 /* Redirect the RETURN_EXPR in Q->STMT to FINLAB. Place in Q->CONT_STMT
571 whatever is needed to finish the return. If MOD is non-null, insert it
572 before the new branch. RETURN_VALUE_P is a cache containing a temporary
573 variable to be used in manipulating the value returned from the function. */
574
575 static void
576 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
577 tree *return_value_p)
578 {
579 tree ret_expr = TREE_OPERAND (q->stmt, 0);
580 tree x;
581
582 if (ret_expr)
583 {
584 /* The nasty part about redirecting the return value is that the
585 return value itself is to be computed before the FINALLY block
586 is executed. e.g.
587
588 int x;
589 int foo (void)
590 {
591 x = 0;
592 try {
593 return x;
594 } finally {
595 x++;
596 }
597 }
598
599 should return 0, not 1. Arrange for this to happen by copying
600 the computed return value into a local temporary. This also
601 allows us to redirect multiple return statements through the
602 same destination block; whether this is a net win or not really
603 depends, I guess, but it does make generation of the switch in
604 lower_try_finally_switch easier. */
605
606 switch (TREE_CODE (ret_expr))
607 {
608 case RESULT_DECL:
609 if (!*return_value_p)
610 *return_value_p = ret_expr;
611 else
612 gcc_assert (*return_value_p == ret_expr);
613 q->cont_stmt = q->stmt;
614 break;
615
616 case MODIFY_EXPR:
617 {
618 tree result = TREE_OPERAND (ret_expr, 0);
619 tree new, old = TREE_OPERAND (ret_expr, 1);
620
621 if (!*return_value_p)
622 {
623 if (aggregate_value_p (TREE_TYPE (result),
624 TREE_TYPE (current_function_decl)))
625 /* If this function returns in memory, copy the argument
626 into the return slot now. Otherwise, we might need to
627 worry about magic return semantics, so we need to use a
628 temporary to hold the value until we're actually ready
629 to return. */
630 new = result;
631 else
632 new = create_tmp_var (TREE_TYPE (old), "rettmp");
633 *return_value_p = new;
634 }
635 else
636 new = *return_value_p;
637
638 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
639 append_to_statement_list (x, &q->repl_stmt);
640
641 if (new == result)
642 x = result;
643 else
644 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
645 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
646 }
647 break;
648 default:
649 gcc_unreachable ();
650 }
651 }
652 else
653 {
654 /* If we don't return a value, all return statements are the same. */
655 q->cont_stmt = q->stmt;
656 }
657
658 if (mod)
659 append_to_statement_list (mod, &q->repl_stmt);
660
661 x = build1 (GOTO_EXPR, void_type_node, finlab);
662 append_to_statement_list (x, &q->repl_stmt);
663 }
664
665 /* Similar, but easier, for GOTO_EXPR. */
666
667 static void
668 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
669 {
670 tree x;
671
672 q->cont_stmt = q->stmt;
673 if (mod)
674 append_to_statement_list (mod, &q->repl_stmt);
675
676 x = build1 (GOTO_EXPR, void_type_node, finlab);
677 append_to_statement_list (x, &q->repl_stmt);
678 }
679
680 /* We want to transform
681 try { body; } catch { stuff; }
682 to
683 body; goto over; lab: stuff; over:
684
685 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
686 should be placed before the second operand, or NULL. OVER is
687 an existing label that should be put at the exit, or NULL. */
688
689 static void
690 frob_into_branch_around (tree *tp, tree lab, tree over)
691 {
692 tree x, op1;
693
694 op1 = TREE_OPERAND (*tp, 1);
695 *tp = TREE_OPERAND (*tp, 0);
696
697 if (block_may_fallthru (*tp))
698 {
699 if (!over)
700 over = create_artificial_label ();
701 x = build1 (GOTO_EXPR, void_type_node, over);
702 append_to_statement_list (x, tp);
703 }
704
705 if (lab)
706 {
707 x = build1 (LABEL_EXPR, void_type_node, lab);
708 append_to_statement_list (x, tp);
709 }
710
711 append_to_statement_list (op1, tp);
712
713 if (over)
714 {
715 x = build1 (LABEL_EXPR, void_type_node, over);
716 append_to_statement_list (x, tp);
717 }
718 }
719
720 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
721 Make sure to record all new labels found. */
722
723 static tree
724 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
725 {
726 tree region = NULL;
727
728 t = unsave_expr_now (t);
729
730 if (outer_state->tf)
731 region = outer_state->tf->try_finally_expr;
732 collect_finally_tree (t, region);
733
734 return t;
735 }
736
737 /* A subroutine of lower_try_finally. Create a fallthru label for
738 the given try_finally state. The only tricky bit here is that
739 we have to make sure to record the label in our outer context. */
740
741 static tree
742 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
743 {
744 tree label = tf->fallthru_label;
745 if (!label)
746 {
747 label = create_artificial_label ();
748 tf->fallthru_label = label;
749 if (tf->outer->tf)
750 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
751 }
752 return label;
753 }
754
755 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
756 returns non-null, then the language requires that the exception path out
757 of a try_finally be treated specially. To wit: the code within the
758 finally block may not itself throw an exception. We have two choices here.
759 First we can duplicate the finally block and wrap it in a must_not_throw
760 region. Second, we can generate code like
761
762 try {
763 finally_block;
764 } catch {
765 if (fintmp == eh_edge)
766 protect_cleanup_actions;
767 }
768
769 where "fintmp" is the temporary used in the switch statement generation
770 alternative considered below. For the nonce, we always choose the first
771 option.
772
773 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
774
775 static void
776 honor_protect_cleanup_actions (struct leh_state *outer_state,
777 struct leh_state *this_state,
778 struct leh_tf_state *tf)
779 {
780 tree protect_cleanup_actions, finally, x;
781 tree_stmt_iterator i;
782 bool finally_may_fallthru;
783
784 /* First check for nothing to do. */
785 if (lang_protect_cleanup_actions)
786 protect_cleanup_actions = lang_protect_cleanup_actions ();
787 else
788 protect_cleanup_actions = NULL;
789
790 finally = TREE_OPERAND (*tf->top_p, 1);
791
792 /* If the EH case of the finally block can fall through, this may be a
793 structure of the form
794 try {
795 try {
796 throw ...;
797 } cleanup {
798 try {
799 throw ...;
800 } catch (...) {
801 }
802 }
803 } catch (...) {
804 yyy;
805 }
806 E.g. with an inline destructor with an embedded try block. In this
807 case we must save the runtime EH data around the nested exception.
808
809 This complication means that any time the previous runtime data might
810 be used (via fallthru from the finally) we handle the eh case here,
811 whether or not protect_cleanup_actions is active. */
812
813 finally_may_fallthru = block_may_fallthru (finally);
814 if (!finally_may_fallthru && !protect_cleanup_actions)
815 return;
816
817 /* Duplicate the FINALLY block. Only need to do this for try-finally,
818 and not for cleanups. */
819 if (this_state)
820 finally = lower_try_finally_dup_block (finally, outer_state);
821
822 /* Resume execution after the exception. Adding this now lets
823 lower_eh_filter not add unnecessary gotos, as it is clear that
824 we never fallthru from this copy of the finally block. */
825 if (finally_may_fallthru)
826 {
827 tree save_eptr, save_filt;
828
829 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
830 save_filt = create_tmp_var (integer_type_node, "save_filt");
831
832 i = tsi_start (finally);
833 x = build (EXC_PTR_EXPR, ptr_type_node);
834 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
835 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
836
837 x = build (FILTER_EXPR, integer_type_node);
838 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
839 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
840
841 i = tsi_last (finally);
842 x = build (EXC_PTR_EXPR, ptr_type_node);
843 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
844 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
845
846 x = build (FILTER_EXPR, integer_type_node);
847 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
848 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
849
850 x = build_resx (get_eh_region_number (tf->region));
851 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
852 }
853
854 /* Wrap the block with protect_cleanup_actions as the action. */
855 if (protect_cleanup_actions)
856 {
857 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
858 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
859 EH_FILTER_MUST_NOT_THROW (x) = 1;
860 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
861 lower_eh_filter (outer_state, &finally);
862 }
863 else
864 lower_eh_constructs_1 (outer_state, &finally);
865
866 /* Hook this up to the end of the existing try block. If we
867 previously fell through the end, we'll have to branch around.
868 This means adding a new goto, and adding it to the queue. */
869
870 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
871
872 if (tf->may_fallthru)
873 {
874 x = lower_try_finally_fallthru_label (tf);
875 x = build1 (GOTO_EXPR, void_type_node, x);
876 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
877
878 if (this_state)
879 maybe_record_in_goto_queue (this_state, x);
880
881 tf->may_fallthru = false;
882 }
883
884 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
885 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
886 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
887
888 /* Having now been handled, EH isn't to be considered with
889 the rest of the outgoing edges. */
890 tf->may_throw = false;
891 }
892
893 /* A subroutine of lower_try_finally. We have determined that there is
894 no fallthru edge out of the finally block. This means that there is
895 no outgoing edge corresponding to any incoming edge. Restructure the
896 try_finally node for this special case. */
897
898 static void
899 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
900 {
901 tree x, finally, lab, return_val;
902 struct goto_queue_node *q, *qe;
903
904 if (tf->may_throw)
905 lab = tf->eh_label;
906 else
907 lab = create_artificial_label ();
908
909 finally = TREE_OPERAND (*tf->top_p, 1);
910 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
911
912 x = build1 (LABEL_EXPR, void_type_node, lab);
913 append_to_statement_list (x, tf->top_p);
914
915 return_val = NULL;
916 q = tf->goto_queue;
917 qe = q + tf->goto_queue_active;
918 for (; q < qe; ++q)
919 if (q->index < 0)
920 do_return_redirection (q, lab, NULL, &return_val);
921 else
922 do_goto_redirection (q, lab, NULL);
923
924 replace_goto_queue (tf);
925
926 lower_eh_constructs_1 (state, &finally);
927 append_to_statement_list (finally, tf->top_p);
928 }
929
930 /* A subroutine of lower_try_finally. We have determined that there is
931 exactly one destination of the finally block. Restructure the
932 try_finally node for this special case. */
933
934 static void
935 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
936 {
937 struct goto_queue_node *q, *qe;
938 tree x, finally, finally_label;
939
940 finally = TREE_OPERAND (*tf->top_p, 1);
941 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
942
943 lower_eh_constructs_1 (state, &finally);
944
945 if (tf->may_throw)
946 {
947 /* Only reachable via the exception edge. Add the given label to
948 the head of the FINALLY block. Append a RESX at the end. */
949
950 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
951 append_to_statement_list (x, tf->top_p);
952
953 append_to_statement_list (finally, tf->top_p);
954
955 x = build_resx (get_eh_region_number (tf->region));
956
957 append_to_statement_list (x, tf->top_p);
958
959 return;
960 }
961
962 if (tf->may_fallthru)
963 {
964 /* Only reachable via the fallthru edge. Do nothing but let
965 the two blocks run together; we'll fall out the bottom. */
966 append_to_statement_list (finally, tf->top_p);
967 return;
968 }
969
970 finally_label = create_artificial_label ();
971 x = build1 (LABEL_EXPR, void_type_node, finally_label);
972 append_to_statement_list (x, tf->top_p);
973
974 append_to_statement_list (finally, tf->top_p);
975
976 q = tf->goto_queue;
977 qe = q + tf->goto_queue_active;
978
979 if (tf->may_return)
980 {
981 /* Reachable by return expressions only. Redirect them. */
982 tree return_val = NULL;
983 for (; q < qe; ++q)
984 do_return_redirection (q, finally_label, NULL, &return_val);
985 replace_goto_queue (tf);
986 }
987 else
988 {
989 /* Reachable by goto expressions only. Redirect them. */
990 for (; q < qe; ++q)
991 do_goto_redirection (q, finally_label, NULL);
992 replace_goto_queue (tf);
993
994 if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
995 {
996 /* Reachable by goto to fallthru label only. Redirect it
997 to the new label (already created, sadly), and do not
998 emit the final branch out, or the fallthru label. */
999 tf->fallthru_label = NULL;
1000 return;
1001 }
1002 }
1003
1004 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1005 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1006 }
1007
1008 /* A subroutine of lower_try_finally. There are multiple edges incoming
1009 and outgoing from the finally block. Implement this by duplicating the
1010 finally block for every destination. */
1011
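/* Shape of the result, roughly (an illustrative sketch, not actual
   compiler output; labels are invented): with a fallthru successor, a
   possible exception, and one escaping goto, the lowering below emits

	<try body, with escaping gotos redirected to L1>
	<copy 1 of finally>  goto fallthru_label;
     eh_label:
	<copy 2 of finally>  resx;
     L1:
	<copy 3 of finally>  goto original_destination;

   i.e. one duplicate of the finally block per distinct destination.  */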
1012 static void
1013 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1014 {
1015 tree finally, new_stmt;
1016 tree x;
1017
1018 finally = TREE_OPERAND (*tf->top_p, 1);
1019 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1020
1021 new_stmt = NULL_TREE;
1022
1023 if (tf->may_fallthru)
1024 {
1025 x = lower_try_finally_dup_block (finally, state);
1026 lower_eh_constructs_1 (state, &x);
1027 append_to_statement_list (x, &new_stmt);
1028
1029 x = lower_try_finally_fallthru_label (tf);
1030 x = build1 (GOTO_EXPR, void_type_node, x);
1031 append_to_statement_list (x, &new_stmt);
1032 }
1033
1034 if (tf->may_throw)
1035 {
1036 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1037 append_to_statement_list (x, &new_stmt);
1038
1039 x = lower_try_finally_dup_block (finally, state);
1040 lower_eh_constructs_1 (state, &x);
1041 append_to_statement_list (x, &new_stmt);
1042
1043 x = build_resx (get_eh_region_number (tf->region));
1044 append_to_statement_list (x, &new_stmt);
1045 }
1046
1047 if (tf->goto_queue)
1048 {
1049 struct goto_queue_node *q, *qe;
1050 tree return_val = NULL;
1051 int return_index, index;
1052 struct
1053 {
1054 struct goto_queue_node *q;
1055 tree label;
1056 } *labels;
1057
1058 if (tf->dest_array)
1059 return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
1060 else
1061 return_index = 0;
1062 labels = xcalloc (sizeof (*labels), return_index + 1);
1063
1064 q = tf->goto_queue;
1065 qe = q + tf->goto_queue_active;
1066 for (; q < qe; q++)
1067 {
1068 index = q->index < 0 ? return_index : q->index;
1069
1070 if (!labels[index].q)
1071 labels[index].q = q;
1072 }
1073
1074 for (index = 0; index < return_index + 1; index++)
1075 {
1076 tree lab;
1077
1078 q = labels[index].q;
1079 if (! q)
1080 continue;
1081
1082 lab = labels[index].label = create_artificial_label ();
1083
1084 if (index == return_index)
1085 do_return_redirection (q, lab, NULL, &return_val);
1086 else
1087 do_goto_redirection (q, lab, NULL);
1088
1089 x = build1 (LABEL_EXPR, void_type_node, lab);
1090 append_to_statement_list (x, &new_stmt);
1091
1092 x = lower_try_finally_dup_block (finally, state);
1093 lower_eh_constructs_1 (state, &x);
1094 append_to_statement_list (x, &new_stmt);
1095
1096 append_to_statement_list (q->cont_stmt, &new_stmt);
1097 maybe_record_in_goto_queue (state, q->cont_stmt);
1098 }
1099
1100 for (q = tf->goto_queue; q < qe; q++)
1101 {
1102 tree lab;
1103
1104 index = q->index < 0 ? return_index : q->index;
1105
1106 if (labels[index].q == q)
1107 continue;
1108
1109 lab = labels[index].label;
1110
1111 if (index == return_index)
1112 do_return_redirection (q, lab, NULL, &return_val);
1113 else
1114 do_goto_redirection (q, lab, NULL);
1115 }
1116
1117 replace_goto_queue (tf);
1118 free (labels);
1119 }
1120
1121 /* The new statements must be linked in only after running replace_goto_queue,
1122 so that the same goto stmts are not processed twice. */
1123 append_to_statement_list (new_stmt, tf->top_p);
1124 }
1125
1126 /* A subroutine of lower_try_finally. There are multiple edges incoming
1127 and outgoing from the finally block. Implement this by instrumenting
1128 each incoming edge and creating a switch statement at the end of the
1129 finally block that branches to the appropriate destination. */
1130
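/* Shape of the result, roughly (an illustrative sketch; the selector
   values and labels are invented): every way into the finally block
   first stores a discriminating value in a temporary, the single copy
   of the finally block runs, and a switch dispatches afterwards:

	<try body>
	finally_tmp = FALLTHRU;  goto finally_label;
     eh_label:
	finally_tmp = EH;
     finally_label:
	<finally block>
	switch (finally_tmp)
	  {
	  case RET:       goto return_sequence;
	  case EH:        resx;
	  case FALLTHRU:  goto fallthru_label;
	  }

   with one of the cases additionally serving as the required default
   label.  */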
1131 static void
1132 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1133 {
1134 struct goto_queue_node *q, *qe;
1135 tree return_val = NULL;
1136 tree finally, finally_tmp, finally_label;
1137 int return_index, eh_index, fallthru_index;
1138 int nlabels, ndests, j, last_case_index;
1139 tree case_label_vec, switch_stmt, last_case, switch_body;
1140 tree x;
1141
1142 /* Mash the TRY block to the head of the chain. */
1143 finally = TREE_OPERAND (*tf->top_p, 1);
1144 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1145
1146 /* Lower the finally block itself. */
1147 lower_eh_constructs_1 (state, &finally);
1148
1149 /* Prepare for switch statement generation. */
1150 if (tf->dest_array)
1151 nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
1152 else
1153 nlabels = 0;
1154 return_index = nlabels;
1155 eh_index = return_index + tf->may_return;
1156 fallthru_index = eh_index + tf->may_throw;
1157 ndests = fallthru_index + tf->may_fallthru;
1158
1159 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1160 finally_label = create_artificial_label ();
1161
1162 case_label_vec = make_tree_vec (ndests);
1163 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1164 NULL_TREE, case_label_vec);
1165 switch_body = NULL;
1166 last_case = NULL;
1167 last_case_index = 0;
1168
1169 /* Begin inserting code for getting to the finally block. Things
1170 are done in this order to correspond to the sequence the code is
1171 laid out. */
1172
1173 if (tf->may_fallthru)
1174 {
1175 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1176 build_int_cst (NULL_TREE, fallthru_index));
1177 append_to_statement_list (x, tf->top_p);
1178
1179 if (tf->may_throw)
1180 {
1181 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1182 append_to_statement_list (x, tf->top_p);
1183 }
1184
1185
1186 last_case = build (CASE_LABEL_EXPR, void_type_node,
1187 build_int_cst (NULL_TREE, fallthru_index), NULL,
1188 create_artificial_label ());
1189 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1190 last_case_index++;
1191
1192 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1193 append_to_statement_list (x, &switch_body);
1194
1195 x = lower_try_finally_fallthru_label (tf);
1196 x = build1 (GOTO_EXPR, void_type_node, x);
1197 append_to_statement_list (x, &switch_body);
1198 }
1199
1200 if (tf->may_throw)
1201 {
1202 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1203 append_to_statement_list (x, tf->top_p);
1204
1205 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1206 build_int_cst (NULL_TREE, eh_index));
1207 append_to_statement_list (x, tf->top_p);
1208
1209 last_case = build (CASE_LABEL_EXPR, void_type_node,
1210 build_int_cst (NULL_TREE, eh_index), NULL,
1211 create_artificial_label ());
1212 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1213 last_case_index++;
1214
1215 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1216 append_to_statement_list (x, &switch_body);
1217 x = build_resx (get_eh_region_number (tf->region));
1218 append_to_statement_list (x, &switch_body);
1219 }
1220
1221 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1222 append_to_statement_list (x, tf->top_p);
1223
1224 append_to_statement_list (finally, tf->top_p);
1225
1226 /* Redirect each incoming goto edge. */
1227 q = tf->goto_queue;
1228 qe = q + tf->goto_queue_active;
1229 j = last_case_index + tf->may_return;
1230 for (; q < qe; ++q)
1231 {
1232 tree mod;
1233 int switch_id, case_index;
1234
1235 if (q->index < 0)
1236 {
1237 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1238 build_int_cst (NULL_TREE, return_index));
1239 do_return_redirection (q, finally_label, mod, &return_val);
1240 switch_id = return_index;
1241 }
1242 else
1243 {
1244 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1245 build_int_cst (NULL_TREE, q->index));
1246 do_goto_redirection (q, finally_label, mod);
1247 switch_id = q->index;
1248 }
1249
1250 case_index = j + q->index;
1251 if (!TREE_VEC_ELT (case_label_vec, case_index))
1252 TREE_VEC_ELT (case_label_vec, case_index)
1253 = build (CASE_LABEL_EXPR, void_type_node,
1254 build_int_cst (NULL_TREE, switch_id), NULL,
1255 /* We store the cont_stmt in the
1256 CASE_LABEL, so that we can recover it
1257 in the loop below. We don't create
1258 the new label while walking the
1259 goto_queue because pointers don't
1260 offer a stable order. */
1261 q->cont_stmt);
1262 }
1263 for (j = last_case_index; j < last_case_index + nlabels; j++)
1264 {
1265 tree label;
1266 tree cont_stmt;
1267
1268 last_case = TREE_VEC_ELT (case_label_vec, j);
1269
1270 gcc_assert (last_case);
1271
1272 cont_stmt = CASE_LABEL (last_case);
1273
1274 label = create_artificial_label ();
1275 CASE_LABEL (last_case) = label;
1276
1277 x = build (LABEL_EXPR, void_type_node, label);
1278 append_to_statement_list (x, &switch_body);
1279 append_to_statement_list (cont_stmt, &switch_body);
1280 maybe_record_in_goto_queue (state, cont_stmt);
1281 }
1282 replace_goto_queue (tf);
1283
1284 /* Make sure that the last case is the default label, as one is required.
1285 Then sort the labels, which is also required in GIMPLE. */
1286 CASE_LOW (last_case) = NULL;
1287 sort_case_labels (case_label_vec);
1288
1289 /* The switch statement must be linked in only after running replace_goto_queue,
1290 so that the same goto stmts are not processed twice. */
1291 append_to_statement_list (switch_stmt, tf->top_p);
1292 append_to_statement_list (switch_body, tf->top_p);
1293 }
1294
1295 /* Decide whether or not we are going to duplicate the finally block.
1296 There are several considerations.
1297
1298 First, if this is Java, then the finally block contains code
1299 written by the user. It has line numbers associated with it,
1300 so duplicating the block means it's difficult to set a breakpoint.
1301 Since controlling code generation via -g is verboten, we simply
1302 never duplicate code without optimization.
1303
1304 Second, we'd like to prevent egregious code growth. One way to
1305 do this is to estimate the size of the finally block, multiply
1306 that by the number of copies we'd need to make, and compare against
1307 the estimate of the size of the switch machinery we'd have to add. */
1308
1309 static bool
1310 decide_copy_try_finally (int ndests, tree finally)
1311 {
1312 int f_estimate, sw_estimate;
1313
1314 if (!optimize)
1315 return false;
1316
1317 /* Finally estimate N times, plus N gotos. */
1318 f_estimate = estimate_num_insns (finally);
1319 f_estimate = (f_estimate + 1) * ndests;
1320
1321 /* Switch statement (cost 10), N variable assignments, N gotos. */
1322 sw_estimate = 10 + 2 * ndests;
1323
1324 /* Optimize for size clearly wants our best guess. */
1325 if (optimize_size)
1326 return f_estimate < sw_estimate;
1327
1328 /* ??? These numbers are completely made up so far. */
1329 if (optimize > 1)
1330 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1331 else
1332 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1333 }
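/* A worked example of the heuristic above (the numbers are invented):
   a finally block estimated at 8 insns reached from 3 destinations gives
   f_estimate = (8 + 1) * 3 = 27 and sw_estimate = 10 + 2 * 3 = 16.
   Under -Os, 27 < 16 is false, so the switch form is chosen; at -O2 the
   block still counts as small (27 < 100), so it is duplicated.  */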
1334
1335 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR node
1336 to a sequence of labels and blocks, plus the exception region trees
1337 that record all the magic. This is complicated by the need to
1338 arrange for the FINALLY block to be executed on all exits. */
1339
1340 static void
1341 lower_try_finally (struct leh_state *state, tree *tp)
1342 {
1343 struct leh_tf_state this_tf;
1344 struct leh_state this_state;
1345 int ndests;
1346
1347 /* Process the try block. */
1348
1349 memset (&this_tf, 0, sizeof (this_tf));
1350 this_tf.try_finally_expr = *tp;
1351 this_tf.top_p = tp;
1352 this_tf.outer = state;
1353 if (using_eh_for_cleanups_p)
1354 this_tf.region
1355 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1356 else
1357 this_tf.region = NULL;
1358
1359 this_state.cur_region = this_tf.region;
1360 this_state.prev_try = state->prev_try;
1361 this_state.tf = &this_tf;
1362
1363 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1364
1365 /* Determine if the try block is escaped through the bottom. */
1366 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1367
1368 /* Determine if any exceptions are possible within the try block. */
1369 if (using_eh_for_cleanups_p)
1370 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1371 if (this_tf.may_throw)
1372 {
1373 this_tf.eh_label = create_artificial_label ();
1374 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1375 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1376 }
1377
1378 /* Sort the goto queue for efficient searching later. */
1379 if (this_tf.goto_queue_active > 1)
1380 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1381 sizeof (struct goto_queue_node), goto_queue_cmp);
1382
1383 /* Determine how many edges (still) reach the finally block. Or rather,
1384 how many destinations are reached by the finally block. Use this to
1385 determine how we process the finally block itself. */
1386
1387 if (this_tf.dest_array)
1388 ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
1389 else
1390 ndests = 0;
1391 ndests += this_tf.may_fallthru;
1392 ndests += this_tf.may_return;
1393 ndests += this_tf.may_throw;
1394
1395 /* If the FINALLY block is not reachable, dike it out. */
1396 if (ndests == 0)
1397 *tp = TREE_OPERAND (*tp, 0);
1398
1399 /* If the finally block doesn't fall through, then any destination
1400 we might try to impose there isn't reached either. There may be
1401 some minor amount of cleanup and redirection still needed. */
1402 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1403 lower_try_finally_nofallthru (state, &this_tf);
1404
1405 /* We can easily special-case redirection to a single destination. */
1406 else if (ndests == 1)
1407 lower_try_finally_onedest (state, &this_tf);
1408
1409 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1410 lower_try_finally_copy (state, &this_tf);
1411 else
1412 lower_try_finally_switch (state, &this_tf);
1413
1414 /* If someone requested we add a label at the end of the transformed
1415 block, do so. */
1416 if (this_tf.fallthru_label)
1417 {
1418 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1419 append_to_statement_list (x, tp);
1420 }
1421
1422 if (this_tf.goto_queue)
1423 free (this_tf.goto_queue);
1424 }
1425
1426 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1427 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1428 exception region trees that record all the magic. */
1429
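/* An illustrative sketch of the transformation (not actual compiler
   output; labels are invented): for

	try { body; } catch (A) { h1; } catch (B) { h2; }

   the loop below, together with frob_into_branch_around, produces
   roughly

	body;  goto over;
     La:  h1;  goto over;
     Lb:  h2;  goto over;
     over: ;

   where La and Lb are the artificial labels registered with their
   respective catch regions via set_eh_region_tree_label.  */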
1430 static void
1431 lower_catch (struct leh_state *state, tree *tp)
1432 {
1433 struct eh_region *try_region;
1434 struct leh_state this_state;
1435 tree_stmt_iterator i;
1436 tree out_label;
1437
1438 try_region = gen_eh_region_try (state->cur_region);
1439 this_state.cur_region = try_region;
1440 this_state.prev_try = try_region;
1441 this_state.tf = state->tf;
1442
1443 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1444
1445 if (!get_eh_region_may_contain_throw (try_region))
1446 {
1447 *tp = TREE_OPERAND (*tp, 0);
1448 return;
1449 }
1450
1451 out_label = NULL;
1452 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1453 {
1454 struct eh_region *catch_region;
1455 tree catch, x, eh_label;
1456
1457 catch = tsi_stmt (i);
1458 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1459
1460 this_state.cur_region = catch_region;
1461 this_state.prev_try = state->prev_try;
1462 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1463
1464 eh_label = create_artificial_label ();
1465 set_eh_region_tree_label (catch_region, eh_label);
1466
1467 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1468 tsi_link_before (&i, x, TSI_SAME_STMT);
1469
1470 if (block_may_fallthru (CATCH_BODY (catch)))
1471 {
1472 if (!out_label)
1473 out_label = create_artificial_label ();
1474
1475 x = build1 (GOTO_EXPR, void_type_node, out_label);
1476 append_to_statement_list (x, &CATCH_BODY (catch));
1477 }
1478
1479 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1480 tsi_delink (&i);
1481 }
1482
1483 frob_into_branch_around (tp, NULL, out_label);
1484 }
1485
1486 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with an
1487 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1488 region trees that record all the magic. */
1489
1490 static void
1491 lower_eh_filter (struct leh_state *state, tree *tp)
1492 {
1493 struct leh_state this_state;
1494 struct eh_region *this_region;
1495 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1496 tree eh_label;
1497
1498 if (EH_FILTER_MUST_NOT_THROW (inner))
1499 this_region = gen_eh_region_must_not_throw (state->cur_region);
1500 else
1501 this_region = gen_eh_region_allowed (state->cur_region,
1502 EH_FILTER_TYPES (inner));
1503 this_state = *state;
1504 this_state.cur_region = this_region;
1505
1506 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1507
1508 if (!get_eh_region_may_contain_throw (this_region))
1509 {
1510 *tp = TREE_OPERAND (*tp, 0);
1511 return;
1512 }
1513
1514 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1515 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1516
1517 eh_label = create_artificial_label ();
1518 set_eh_region_tree_label (this_region, eh_label);
1519
1520 frob_into_branch_around (tp, eh_label, NULL);
1521 }
1522
1523 /* Implement a cleanup expression. This is similar to try-finally,
1524 except that we only execute the cleanup block for exception edges. */
1525
1526 static void
1527 lower_cleanup (struct leh_state *state, tree *tp)
1528 {
1529 struct leh_state this_state;
1530 struct eh_region *this_region;
1531 struct leh_tf_state fake_tf;
1532
1533 /* If not using eh, then exception-only cleanups are no-ops. */
1534 if (!flag_exceptions)
1535 {
1536 *tp = TREE_OPERAND (*tp, 0);
1537 lower_eh_constructs_1 (state, tp);
1538 return;
1539 }
1540
1541 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1542 this_state = *state;
1543 this_state.cur_region = this_region;
1544
1545 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1546
1547 if (!get_eh_region_may_contain_throw (this_region))
1548 {
1549 *tp = TREE_OPERAND (*tp, 0);
1550 return;
1551 }
1552
1553 /* Build enough of a try-finally state so that we can reuse
1554 honor_protect_cleanup_actions. */
1555 memset (&fake_tf, 0, sizeof (fake_tf));
1556 fake_tf.top_p = tp;
1557 fake_tf.outer = state;
1558 fake_tf.region = this_region;
1559 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1560 fake_tf.may_throw = true;
1561
1562 fake_tf.eh_label = create_artificial_label ();
1563 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1564
1565 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1566
1567 if (fake_tf.may_throw)
1568 {
1569 /* In this case honor_protect_cleanup_actions had nothing to do,
1570 and we should process this normally. */
1571 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1572 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1573 }
1574 else
1575 {
1576 /* In this case honor_protect_cleanup_actions did nearly all of
1577 the work. All we have left is to append the fallthru_label. */
1578
1579 *tp = TREE_OPERAND (*tp, 0);
1580 if (fake_tf.fallthru_label)
1581 {
1582 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1583 append_to_statement_list (x, tp);
1584 }
1585 }
1586 }
1587
1588 /* Main loop for lowering eh constructs. */
1589
1590 static void
1591 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1592 {
1593 tree_stmt_iterator i;
1594 tree t = *tp;
1595
1596 switch (TREE_CODE (t))
1597 {
1598 case COND_EXPR:
1599 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1600 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1601 break;
1602
1603 case CALL_EXPR:
1604 /* Look for things that can throw exceptions, and record them. */
1605 if (state->cur_region && tree_could_throw_p (t))
1606 {
1607 record_stmt_eh_region (state->cur_region, t);
1608 note_eh_region_may_contain_throw (state->cur_region);
1609 }
1610 break;
1611
1612 case MODIFY_EXPR:
1613 /* Look for things that can throw exceptions, and record them. */
1614 if (state->cur_region && tree_could_throw_p (t))
1615 {
1616 tree op;
1617
1618 record_stmt_eh_region (state->cur_region, t);
1619 note_eh_region_may_contain_throw (state->cur_region);
1620
1621 /* ??? For the benefit of calls.c, converting all this to rtl,
1622 we need to record the call expression, not just the outer
1623 modify statement. */
1624 op = get_call_expr_in (t);
1625 if (op)
1626 record_stmt_eh_region (state->cur_region, op);
1627 }
1628 break;
1629
1630 case GOTO_EXPR:
1631 case RETURN_EXPR:
1632 maybe_record_in_goto_queue (state, t);
1633 break;
1634 case SWITCH_EXPR:
1635 verify_norecord_switch_expr (state, t);
1636 break;
1637
1638 case TRY_FINALLY_EXPR:
1639 lower_try_finally (state, tp);
1640 break;
1641
1642 case TRY_CATCH_EXPR:
1643 i = tsi_start (TREE_OPERAND (t, 1));
1644 switch (TREE_CODE (tsi_stmt (i)))
1645 {
1646 case CATCH_EXPR:
1647 lower_catch (state, tp);
1648 break;
1649 case EH_FILTER_EXPR:
1650 lower_eh_filter (state, tp);
1651 break;
1652 default:
1653 lower_cleanup (state, tp);
1654 break;
1655 }
1656 break;
1657
1658 case STATEMENT_LIST:
1659 for (i = tsi_start (t); !tsi_end_p (i); )
1660 {
1661 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1662 t = tsi_stmt (i);
1663 if (TREE_CODE (t) == STATEMENT_LIST)
1664 {
1665 tsi_link_before (&i, t, TSI_SAME_STMT);
1666 tsi_delink (&i);
1667 }
1668 else
1669 tsi_next (&i);
1670 }
1671 break;
1672
1673 default:
1674 /* A type, a decl, or some kind of statement that we're not
1675 interested in. Don't walk them. */
1676 break;
1677 }
1678 }
1679
1680 static void
1681 lower_eh_constructs (void)
1682 {
1683 struct leh_state null_state;
1684 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1685
1686 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1687 set_eh_throw_stmt_table (cfun, htab_create_ggc (31, struct_ptr_hash,
1688 struct_ptr_eq,
1689 ggc_free));
1690
1691 collect_finally_tree (*tp, NULL);
1692
1693 memset (&null_state, 0, sizeof (null_state));
1694 lower_eh_constructs_1 (&null_state, tp);
1695
1696 htab_delete (finally_tree);
1697
1698 collect_eh_region_array ();
1699 }
1700
1701 struct tree_opt_pass pass_lower_eh =
1702 {
1703 "eh", /* name */
1704 NULL, /* gate */
1705 lower_eh_constructs, /* execute */
1706 NULL, /* sub */
1707 NULL, /* next */
1708 0, /* static_pass_number */
1709 TV_TREE_EH, /* tv_id */
1710 PROP_gimple_lcf, /* properties_required */
1711 PROP_gimple_leh, /* properties_provided */
1712 PROP_gimple_lcf, /* properties_destroyed */
1713 0, /* todo_flags_start */
1714 TODO_dump_func, /* todo_flags_finish */
1715 0 /* letter */
1716 };
1717
1718 \f
1719 /* Construct EH edges for STMT. */
1720
1721 static void
1722 make_eh_edge (struct eh_region *region, void *data)
1723 {
1724 tree stmt, lab;
1725 basic_block src, dst;
1726
1727 stmt = data;
1728 lab = get_eh_region_tree_label (region);
1729
1730 src = bb_for_stmt (stmt);
1731 dst = label_to_block (lab);
1732
1733 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1734 }
1735
1736 void
1737 make_eh_edges (tree stmt)
1738 {
1739 int region_nr;
1740 bool is_resx;
1741
1742 if (TREE_CODE (stmt) == RESX_EXPR)
1743 {
1744 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1745 is_resx = true;
1746 }
1747 else
1748 {
1749 region_nr = lookup_stmt_eh_region (stmt);
1750 if (region_nr < 0)
1751 return;
1752 is_resx = false;
1753 }
1754
1755 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1756 }
1757
1758 static bool mark_eh_edge_found_error;
1759
1760 /* Mark the edge that make_eh_edge would create for the given region by
1761 setting its aux field; output an error if something goes wrong. */
1762 static void
1763 mark_eh_edge (struct eh_region *region, void *data)
1764 {
1765 tree stmt, lab;
1766 basic_block src, dst;
1767 edge e;
1768
1769 stmt = data;
1770 lab = get_eh_region_tree_label (region);
1771
1772 src = bb_for_stmt (stmt);
1773 dst = label_to_block (lab);
1774
1775 e = find_edge (src, dst);
1776 if (!e)
1777 {
1778 error ("EH edge %i->%i is missing %i %i.", src->index, dst->index, src, dst);
1779 mark_eh_edge_found_error = true;
1780 }
1781 else if (!(e->flags & EDGE_EH))
1782 {
1783 error ("EH edge %i->%i miss EH flag.", src->index, dst->index);
1784 mark_eh_edge_found_error = true;
1785 }
1786 else if (e->aux)
1787 {
1788 /* ??? This might not be a mistake. */
1789 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1790 mark_eh_edge_found_error = true;
1791 }
1792 else
1793 e->aux = (void *)1;
1794 }
1795
1796 /* Verify that the BB containing STMT as its last statement has precisely the
1797 edges that make_eh_edges would create. */
1798 bool
1799 verify_eh_edges (tree stmt)
1800 {
1801 int region_nr;
1802 bool is_resx;
1803 basic_block bb = bb_for_stmt (stmt);
1804 edge_iterator ei;
1805 edge e;
1806
1807 FOR_EACH_EDGE (e, ei, bb->succs)
1808 gcc_assert (!e->aux);
1809 mark_eh_edge_found_error = false;
1810 if (TREE_CODE (stmt) == RESX_EXPR)
1811 {
1812 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1813 is_resx = true;
1814 }
1815 else
1816 {
1817 region_nr = lookup_stmt_eh_region (stmt);
1818 if (region_nr < 0)
1819 {
1820 FOR_EACH_EDGE (e, ei, bb->succs)
1821 if (e->flags & EDGE_EH)
1822 {
1823 error ("BB %i can not throw but has EH edges", bb->index);
1824 return true;
1825 }
1826 return false;
1827 }
1828 if (!tree_could_throw_p (stmt))
1829 {
1830 error ("BB %i last statement has incorrectly set region", bb->index);
1831 return true;
1832 }
1833 is_resx = false;
1834 }
1835
1836 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1837 FOR_EACH_EDGE (e, ei, bb->succs)
1838 {
1839 if ((e->flags & EDGE_EH) && !e->aux)
1840 {
1841 error ("Unnecesary EH edge %i->%i", bb->index, e->dest->index);
1842 mark_eh_edge_found_error = true;
1843 return true;
1844 }
1845 e->aux = NULL;
1846 }
1847 return mark_eh_edge_found_error;
1848 }
1849
1850 \f
1851 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1852 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
1853 This routine expects only GIMPLE lhs or rhs input. */
1854
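/* Illustrative examples of what the function below considers able to
   trap (hedged; the exact answer depends on the -f flags tested in the
   body):

	*p               may trap unless TREE_THIS_NOTRAP is set
	a[i]             may trap unless the index is provably in bounds
	x / y            may trap unless Y is a nonzero constant
	f + g  (float)   traps only under -ftrapping-math
	i + j  (trapv)   traps when signed overflow is trapping

   Plain integer arithmetic on non-trapping types never traps.  */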
1855 bool
1856 tree_could_trap_p (tree expr)
1857 {
1858 enum tree_code code = TREE_CODE (expr);
1859 bool honor_nans = false;
1860 bool honor_snans = false;
1861 bool fp_operation = false;
1862 bool honor_trapv = false;
1863 tree t, base;
1864
1865 if (TREE_CODE_CLASS (code) == tcc_comparison
1866 || TREE_CODE_CLASS (code) == tcc_unary
1867 || TREE_CODE_CLASS (code) == tcc_binary)
1868 {
1869 t = TREE_TYPE (expr);
1870 fp_operation = FLOAT_TYPE_P (t);
1871 if (fp_operation)
1872 {
1873 honor_nans = flag_trapping_math && !flag_finite_math_only;
1874 honor_snans = flag_signaling_nans != 0;
1875 }
1876 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
1877 honor_trapv = true;
1878 }
1879
1880 restart:
1881 switch (code)
1882 {
1883 case COMPONENT_REF:
1884 case REALPART_EXPR:
1885 case IMAGPART_EXPR:
1886 case BIT_FIELD_REF:
1887 case WITH_SIZE_EXPR:
1888 expr = TREE_OPERAND (expr, 0);
1889 code = TREE_CODE (expr);
1890 goto restart;
1891
1892 case ARRAY_RANGE_REF:
1893 /* Let us be conservative here for now. We might be checking bounds of
1894 the access similarly to the case below. */
1895 if (!TREE_THIS_NOTRAP (expr))
1896 return true;
1897
1898 base = TREE_OPERAND (expr, 0);
1899 return tree_could_trap_p (base);
1900
1901 case ARRAY_REF:
1902 base = TREE_OPERAND (expr, 0);
1903 if (tree_could_trap_p (base))
1904 return true;
1905
1906 if (TREE_THIS_NOTRAP (expr))
1907 return false;
1908
1909 return !in_array_bounds_p (expr);
1910
1911 case INDIRECT_REF:
1912 case ALIGN_INDIRECT_REF:
1913 case MISALIGNED_INDIRECT_REF:
1914 return !TREE_THIS_NOTRAP (expr);
1915
1916 case ASM_EXPR:
1917 return TREE_THIS_VOLATILE (expr);
1918
1919 case TRUNC_DIV_EXPR:
1920 case CEIL_DIV_EXPR:
1921 case FLOOR_DIV_EXPR:
1922 case ROUND_DIV_EXPR:
1923 case EXACT_DIV_EXPR:
1924 case CEIL_MOD_EXPR:
1925 case FLOOR_MOD_EXPR:
1926 case ROUND_MOD_EXPR:
1927 case TRUNC_MOD_EXPR:
1928 case RDIV_EXPR:
1929 if (honor_snans || honor_trapv)
1930 return true;
1931 if (fp_operation)
1932 return flag_trapping_math;
1933 t = TREE_OPERAND (expr, 1);
1934 if (!TREE_CONSTANT (t) || integer_zerop (t))
1935 return true;
1936 return false;
1937
1938 case LT_EXPR:
1939 case LE_EXPR:
1940 case GT_EXPR:
1941 case GE_EXPR:
1942 case LTGT_EXPR:
1943 /* Some floating point comparisons may trap. */
1944 return honor_nans;
1945
1946 case EQ_EXPR:
1947 case NE_EXPR:
1948 case UNORDERED_EXPR:
1949 case ORDERED_EXPR:
1950 case UNLT_EXPR:
1951 case UNLE_EXPR:
1952 case UNGT_EXPR:
1953 case UNGE_EXPR:
1954 case UNEQ_EXPR:
1955 return honor_snans;
1956
1957 case CONVERT_EXPR:
1958 case FIX_TRUNC_EXPR:
1959 case FIX_CEIL_EXPR:
1960 case FIX_FLOOR_EXPR:
1961 case FIX_ROUND_EXPR:
1962 /* Conversion of floating point might trap. */
1963 return honor_nans;
1964
1965 case NEGATE_EXPR:
1966 case ABS_EXPR:
1967 case CONJ_EXPR:
1968 /* These operations don't trap with floating point. */
1969 if (honor_trapv)
1970 return true;
1971 return false;
1972
1973 case PLUS_EXPR:
1974 case MINUS_EXPR:
1975 case MULT_EXPR:
1976 /* Any floating arithmetic may trap. */
1977 if (fp_operation && flag_trapping_math)
1978 return true;
1979 if (honor_trapv)
1980 return true;
1981 return false;
1982
1983 case CALL_EXPR:
1984 t = get_callee_fndecl (expr);
1985 /* Assume that calls to weak functions may trap. */
1986 if (!t || !DECL_P (t) || DECL_WEAK (t))
1987 return true;
1988 return false;
1989
1990 default:
1991 /* Any floating arithmetic may trap. */
1992 if (fp_operation && flag_trapping_math)
1993 return true;
1994 return false;
1995 }
1996 }
1997
1998 bool
1999 tree_could_throw_p (tree t)
2000 {
2001 if (!flag_exceptions)
2002 return false;
2003 if (TREE_CODE (t) == MODIFY_EXPR)
2004 {
2005 if (flag_non_call_exceptions
2006 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2007 return true;
2008 t = TREE_OPERAND (t, 1);
2009 }
2010
2011 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2012 t = TREE_OPERAND (t, 0);
2013 if (TREE_CODE (t) == CALL_EXPR)
2014 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2015 if (flag_non_call_exceptions)
2016 return tree_could_trap_p (t);
2017 return false;
2018 }
2019
2020 bool
2021 tree_can_throw_internal (tree stmt)
2022 {
2023 int region_nr = lookup_stmt_eh_region (stmt);
2024 if (region_nr < 0)
2025 return false;
2026 return can_throw_internal_1 (region_nr);
2027 }
2028
2029 bool
2030 tree_can_throw_external (tree stmt)
2031 {
2032 int region_nr = lookup_stmt_eh_region (stmt);
2033 if (region_nr < 0)
2034 return false;
2035 return can_throw_external_1 (region_nr);
2036 }
2037
2038 bool
2039 maybe_clean_eh_stmt (tree stmt)
2040 {
2041 if (!tree_could_throw_p (stmt))
2042 if (remove_stmt_from_eh_region (stmt))
2043 return true;
2044 return false;
2045 }
2046