re PR target/21099 (ICE on mmx intrinsics)
[gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39
40 \f
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Called by language front ends to announce that cleanups in this
   translation unit should be run via the exception-handling
   machinery.  Sets the file-scope flag read by the lowering code.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
49 \f
50 /* Misc functions used in this file. */
51
52 /* Compare and hash for any structure which begins with a canonical
53 pointer. Assumes all pointers are interchangeable, which is sort
54 of already assumed by gcc elsewhere IIRC. */
55
/* Equality callback for hash tables whose entries begin with a
   canonical pointer.  Two entries are equal exactly when those
   leading pointers are equal.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const *pa = a;
  const void * const *pb = b;

  return *pa == *pb;
}
63
64 static hashval_t
65 struct_ptr_hash (const void *a)
66 {
67 const void * const * x = a;
68 return (size_t)*x >> 4;
69 }
70
71 \f
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
75
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
79
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
83
/* Hash table entry mapping a statement to the number of the EH region
   it lives in.  Keyed on the STMT pointer itself; see struct_ptr_hash
   and struct_ptr_eq above.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

/* The statement -> EH region map.  GC-marked because the data must
   survive all the way to RTL expansion (see the comment above).  */
static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
91
/* Record that statement T lies within EH REGION, so that the region
   can be recovered at RTL expansion time.  A no-op if REGION is null.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  /* GC-allocated, because throw_stmt_table is GTY-marked.  */
  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  /* Each statement may be registered only once.  */
  gcc_assert (!*slot);
  *slot = n;
}
109
/* Public entry point: record that statement T belongs to EH region
   number NUM.  NUM must be a valid (nonnegative) region number.  */

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);

  /* GC-allocated, because throw_stmt_table is GTY-marked.  */
  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  /* Each statement may be registered only once.  */
  gcc_assert (!*slot);
  *slot = n;
}
126
127 bool
128 remove_stmt_from_eh_region (tree t)
129 {
130 struct throw_stmt_node dummy;
131 void **slot;
132
133 if (!throw_stmt_table)
134 return false;
135
136 dummy.stmt = t;
137 slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
138 if (slot)
139 {
140 htab_clear_slot (throw_stmt_table, slot);
141 return true;
142 }
143 else
144 return false;
145 }
146
147 int
148 lookup_stmt_eh_region (tree t)
149 {
150 struct throw_stmt_node *p, n;
151
152 if (!throw_stmt_table)
153 return -2;
154
155 n.stmt = t;
156 p = htab_find (throw_stmt_table, &n);
157
158 return (p ? p->region_nr : -1);
159 }
160
161
162 \f
163 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
164 nodes and LABEL_DECL nodes. We will use this during the second phase to
165 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
166
/* An edge in the finally tree.  CHILD is a LABEL_DECL or a
   TRY_FINALLY_EXPR; PARENT is the innermost TRY_FINALLY_EXPR that
   encloses it, or NULL_TREE at the outermost level.  */
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
174
175 static void
176 record_in_finally_tree (tree child, tree parent)
177 {
178 struct finally_tree_node *n;
179 void **slot;
180
181 n = xmalloc (sizeof (*n));
182 n->child = child;
183 n->parent = parent;
184
185 slot = htab_find_slot (finally_tree, n, INSERT);
186 gcc_assert (!*slot);
187 *slot = n;
188 }
189
190 static void
191 collect_finally_tree (tree t, tree region)
192 {
193 tailrecurse:
194 switch (TREE_CODE (t))
195 {
196 case LABEL_EXPR:
197 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
198 break;
199
200 case TRY_FINALLY_EXPR:
201 record_in_finally_tree (t, region);
202 collect_finally_tree (TREE_OPERAND (t, 0), t);
203 t = TREE_OPERAND (t, 1);
204 goto tailrecurse;
205
206 case TRY_CATCH_EXPR:
207 collect_finally_tree (TREE_OPERAND (t, 0), region);
208 t = TREE_OPERAND (t, 1);
209 goto tailrecurse;
210
211 case CATCH_EXPR:
212 t = CATCH_BODY (t);
213 goto tailrecurse;
214
215 case EH_FILTER_EXPR:
216 t = EH_FILTER_FAILURE (t);
217 goto tailrecurse;
218
219 case STATEMENT_LIST:
220 {
221 tree_stmt_iterator i;
222 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
223 collect_finally_tree (tsi_stmt (i), region);
224 }
225 break;
226
227 default:
228 /* A type, a decl, or some kind of statement that we're not
229 interested in. Don't walk them. */
230 break;
231 }
232 }
233
234 /* Use the finally tree to determine if a jump from START to TARGET
235 would leave the try_finally node that START lives in. */
236
237 static bool
238 outside_finally_tree (tree start, tree target)
239 {
240 struct finally_tree_node n, *p;
241
242 do
243 {
244 n.child = start;
245 p = htab_find (finally_tree, &n);
246 if (!p)
247 return true;
248 start = p->parent;
249 }
250 while (start != target);
251
252 return false;
253 }
254 \f
255 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
256 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
257 The eh region creation is straight-forward, but frobbing all the gotos
258 and such into shape isn't. */
259
260 /* State of the world while lowering. */
261
struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
325
326 static void lower_eh_filter (struct leh_state *, tree *);
327 static void lower_eh_constructs_1 (struct leh_state *, tree *);
328
329 /* Comparison function for qsort/bsearch. We're interested in
330 searching goto queue elements for source statements. */
331
332 static int
333 goto_queue_cmp (const void *x, const void *y)
334 {
335 tree a = ((const struct goto_queue_node *)x)->stmt;
336 tree b = ((const struct goto_queue_node *)y)->stmt;
337 return (a == b ? 0 : a < b ? -1 : 1);
338 }
339
340 /* Search for STMT in the goto queue. Return the replacement,
341 or null if the statement isn't in the queue. */
342
343 static tree
344 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
345 {
346 struct goto_queue_node tmp, *ret;
347 tmp.stmt = stmt;
348 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
349 sizeof (struct goto_queue_node), goto_queue_cmp);
350 return (ret ? ret->repl_stmt : NULL);
351 }
352
353 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
354 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
355 then we can just splat it in, otherwise we add the new stmts immediately
356 after the COND_EXPR and redirect. */
357
static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  /* If the replacement is a single GOTO_EXPR, it can stand in for
     the clause directly.  */
  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  /* Otherwise, point the clause at a fresh label and park the
     replacement statements after the COND_EXPR, behind that label.  */
  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  /* Link the label first, then the replacement, so they appear in
     that order after the COND_EXPR.  */
  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
381
382 /* The real work of replace_goto_queue. Returns with TSI updated to
383 point to the next statement. */
384
385 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
386
static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  /* Splice in the replacement and remove the original;
	     TSI then already points at the next statement, so we
	     must not fall through to the tsi_next below.  */
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    /* Recurse into the bodies of compound statements.  */
    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      /* Statement lists are walked by replace_goto_queue_stmt_list;
	 one should never appear as an individual statement here.  */
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
430
431 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
432
433 static void
434 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
435 {
436 tree_stmt_iterator i = tsi_start (t);
437 while (!tsi_end_p (i))
438 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
439 }
440
441 /* Replace all goto queue members. */
442
443 static void
444 replace_goto_queue (struct leh_tf_state *tf)
445 {
446 if (tf->goto_queue_active == 0)
447 return;
448 replace_goto_queue_stmt_list (*tf->top_p, tf);
449 }
450
451 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
452 node, and if so record that fact in the goto queue associated with that
453 try_finally node. */
454
static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  /* Nothing to record outside a try_finally context.  */
  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	/* INDEX becomes LAB's position in dest_array, creating the
	   array and/or the entry on first sight of LAB.  */
	if (! tf->dest_array)
	  {
	    VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
	    VARRAY_PUSH_TREE (tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VARRAY_TREE (tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VARRAY_PUSH_TREE (tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      /* All returns share the queue index -1.  */
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  /* Append to the goto queue, doubling its capacity as needed.  */
  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
526
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  /* Outside a try_finally there is nothing to escape from.  */
  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  /* Every case label must stay within the current try_finally.  */
  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
554
555 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
556 whatever is needed to finish the return. If MOD is non-null, insert it
557 before the new branch. RETURN_VALUE_P is a cache containing a temporary
558 variable to be used in manipulating the value returned from the function. */
559
560 static void
561 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
562 tree *return_value_p)
563 {
564 tree ret_expr = TREE_OPERAND (q->stmt, 0);
565 tree x;
566
567 if (ret_expr)
568 {
569 /* The nasty part about redirecting the return value is that the
570 return value itself is to be computed before the FINALLY block
571 is executed. e.g.
572
573 int x;
574 int foo (void)
575 {
576 x = 0;
577 try {
578 return x;
579 } finally {
580 x++;
581 }
582 }
583
584 should return 0, not 1. Arrange for this to happen by copying
585 computed the return value into a local temporary. This also
586 allows us to redirect multiple return statements through the
587 same destination block; whether this is a net win or not really
588 depends, I guess, but it does make generation of the switch in
589 lower_try_finally_switch easier. */
590
591 switch (TREE_CODE (ret_expr))
592 {
593 case RESULT_DECL:
594 if (!*return_value_p)
595 *return_value_p = ret_expr;
596 else
597 gcc_assert (*return_value_p == ret_expr);
598 q->cont_stmt = q->stmt;
599 break;
600
601 case MODIFY_EXPR:
602 {
603 tree result = TREE_OPERAND (ret_expr, 0);
604 tree new, old = TREE_OPERAND (ret_expr, 1);
605
606 if (!*return_value_p)
607 {
608 if (aggregate_value_p (TREE_TYPE (result),
609 TREE_TYPE (current_function_decl)))
610 /* If this function returns in memory, copy the argument
611 into the return slot now. Otherwise, we might need to
612 worry about magic return semantics, so we need to use a
613 temporary to hold the value until we're actually ready
614 to return. */
615 new = result;
616 else
617 new = create_tmp_var (TREE_TYPE (old), "rettmp");
618 *return_value_p = new;
619 }
620 else
621 new = *return_value_p;
622
623 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
624 append_to_statement_list (x, &q->repl_stmt);
625
626 if (new == result)
627 x = result;
628 else
629 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
630 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
631 }
632
633 default:
634 gcc_unreachable ();
635 }
636 }
637 else
638 {
639 /* If we don't return a value, all return statements are the same. */
640 q->cont_stmt = q->stmt;
641 }
642
643 if (mod)
644 append_to_statement_list (mod, &q->repl_stmt);
645
646 x = build1 (GOTO_EXPR, void_type_node, finlab);
647 append_to_statement_list (x, &q->repl_stmt);
648 }
649
650 /* Similar, but easier, for GOTO_EXPR. */
651
652 static void
653 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
654 {
655 tree x;
656
657 q->cont_stmt = q->stmt;
658 if (mod)
659 append_to_statement_list (mod, &q->repl_stmt);
660
661 x = build1 (GOTO_EXPR, void_type_node, finlab);
662 append_to_statement_list (x, &q->repl_stmt);
663 }
664
665 /* We want to transform
666 try { body; } catch { stuff; }
667 to
668 body; goto over; lab: stuff; over:
669
670 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
671 should be placed before the second operand, or NULL. OVER is
672 an existing label that should be put at the exit, or NULL. */
673
static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  /* Unwrap: *TP becomes the first operand (the body); OP1 is the
     handler/cleanup sequence to be appended after it.  */
  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  /* If the body can fall off its end, branch around the handler.  */
  if (block_may_fallthru (*tp))
    {
      if (!over)
	over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  /* Entry label for the handler, if requested.  */
  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  /* Exit label after the handler, if one exists.  */
  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}
704
705 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
706 Make sure to record all new labels found. */
707
708 static tree
709 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
710 {
711 tree region = NULL;
712
713 t = unsave_expr_now (t);
714
715 if (outer_state->tf)
716 region = outer_state->tf->try_finally_expr;
717 collect_finally_tree (t, region);
718
719 return t;
720 }
721
722 /* A subroutine of lower_try_finally. Create a fallthru label for
723 the given try_finally state. The only tricky bit here is that
724 we have to make sure to record the label in our outer context. */
725
726 static tree
727 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
728 {
729 tree label = tf->fallthru_label;
730 if (!label)
731 {
732 label = create_artificial_label ();
733 tf->fallthru_label = label;
734 if (tf->outer->tf)
735 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
736 }
737 return label;
738 }
739
740 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
741 returns non-null, then the language requires that the exception path out
742 of a try_finally be treated specially. To wit: the code within the
743 finally block may not itself throw an exception. We have two choices here.
744 First we can duplicate the finally block and wrap it in a must_not_throw
745 region. Second, we can generate code like
746
747 try {
748 finally_block;
749 } catch {
750 if (fintmp == eh_edge)
751 protect_cleanup_actions;
752 }
753
754 where "fintmp" is the temporary used in the switch statement generation
755 alternative considered below. For the nonce, we always choose the first
756 option.
757
758 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
759
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      /* At the head of the finally copy: save the live exception
	 pointer and filter value into temporaries.  */
      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      /* At the tail: restore them, then re-raise via RESX.  */
      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
879
880 /* A subroutine of lower_try_finally. We have determined that there is
881 no fallthru edge out of the finally block. This means that there is
882 no outgoing edge corresponding to any incoming edge. Restructure the
883 try_finally node for this special case. */
884
static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  /* All incoming edges funnel through a single label: reuse the EH
     landing pad label if an exception edge exists.  */
  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  /* Redirect every queued goto/return at the shared label.  */
  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  /* Since nothing falls out of the finally, no continuation or
     outgoing branch is needed after it.  */
  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
916
917 /* A subroutine of lower_try_finally. We have determined that there is
918 exactly one destination of the finally block. Restructure the
919 try_finally node for this special case. */
920
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  /* The one destination is a queued goto or return.  Label the
     finally block and redirect everything at it.  */
  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Emit the continuation (the completed return or goto) after the
     finally block, and requeue it against any enclosing try_finally.  */
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
995
996 /* A subroutine of lower_try_finally. There are multiple edges incoming
997 and outgoing from the finally block. Implement this by duplicating the
998 finally block for every destination. */
999
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* NEW_STMT collects one copy of the finally block per destination.  */
  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      /* Fallthru copy: run the finally, then jump past everything.  */
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      /* EH copy: landing pad label, the finally, then re-raise.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      /* One copy per unique goto destination, plus one shared by all
	 returns (slot RETURN_INDEX, one past the goto labels).  */
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      if (tf->dest_array)
	return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
	return_index = 0;
      /* NOTE(review): arguments are in (size, count) order; xcalloc's
	 convention is (count, size).  Harmless since the product is
	 the same, but worth normalizing.  */
      labels = xcalloc (sizeof (*labels), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      /* Remember the first queue entry targeting each destination.  */
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* For each live destination: create a label, redirect its
	 representative entry, and emit label + finally copy +
	 continuation.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Redirect the remaining queue entries at the labels created
	 for their destinations above.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
1115
1116 /* A subroutine of lower_try_finally. There are multiple edges incoming
1117 and outgoing from the finally block. Implement this by instrumenting
1118 each incoming edge and creating a switch statement at the end of the
1119 finally block that branches to the appropriate destination. */
1120
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  Destination indices are
     laid out as: [0 .. nlabels-1] the recorded goto destinations, then
     one slot each (if used) for return, exceptional rethrow, and plain
     fallthru.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  /* FINALLY_TMP records which incoming edge we arrived on; the switch
     at the end of the finally block dispatches on it.  */
  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
		       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     layed out.  */

  if (tf->may_fallthru)
    {
      /* Falling off the end of the try block selects the fallthru case.  */
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      /* If an EH landing pad follows, jump over it to the finally code.  */
      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}


      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, fallthru_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      /* The EH landing pad sets the selector and falls into the finally
	 block; its switch case re-raises via RESX_EXPR.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
		 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
			 build_int_cst (NULL_TREE, eh_index), NULL,
			 create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
		  build_int_cst (NULL_TREE,
				 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  /* A return: set the selector to return_index before the
	     redirected jump to the finally block.  */
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
		       build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build (CASE_LABEL_EXPR, void_type_node,
		   build_int_cst (NULL_TREE, switch_id), NULL,
		   /* We store the cont_stmt in the
		      CASE_LABEL, so that we can recover it
		      in the loop below.  We don't create
		      the new label while walking the
		      goto_queue because pointers don't
		      offer a stable order.  */
		   q->cont_stmt);
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      /* Recover the cont_stmt stashed in CASE_LABEL above, then give
	 the case a real artificial label.  */
      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
1286
1287 /* Decide whether or not we are going to duplicate the finally block.
1288 There are several considerations.
1289
1290 First, if this is Java, then the finally block contains code
1291 written by the user. It has line numbers associated with it,
1292 so duplicating the block means it's difficult to set a breakpoint.
1293 Since controlling code generation via -g is verboten, we simply
1294 never duplicate code without optimization.
1295
1296 Second, we'd like to prevent egregious code growth. One way to
1297 do this is to estimate the size of the finally block, multiply
1298 that by the number of copies we'd need to make, and compare against
1299 the estimate of the size of the switch machinery we'd have to add. */
1300
1301 static bool
1302 decide_copy_try_finally (int ndests, tree finally)
1303 {
1304 int f_estimate, sw_estimate;
1305
1306 if (!optimize)
1307 return false;
1308
1309 /* Finally estimate N times, plus N gotos. */
1310 f_estimate = estimate_num_insns (finally);
1311 f_estimate = (f_estimate + 1) * ndests;
1312
1313 /* Switch statement (cost 10), N variable assignments, N gotos. */
1314 sw_estimate = 10 + 2 * ndests;
1315
1316 /* Optimize for size clearly wants our best guess. */
1317 if (optimize_size)
1318 return f_estimate < sw_estimate;
1319
1320 /* ??? These numbers are completely made up so far. */
1321 if (optimize > 1)
1322 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1323 else
1324 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1325 }
1326
1327 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1328 to a sequence of labels and blocks, plus the exception region trees
1329 that record all the magic. This is complicated by the need to
1330 arrange for the FINALLY block to be executed on all exits. */
1331
static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  /* When EH is used for cleanups, give the finally block its own
     cleanup region so throws in the try block reach it.  */
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  /* Otherwise either duplicate the finally block per destination or
     emit a single copy with a dispatch switch, whichever the size
     heuristic prefers.  */
  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
1417
1418 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1419 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1420 exception region trees that record all the magic. */
1421
static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing within the body can throw, the whole construct
     reduces to the body alone.  */
  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Lower each CATCH_EXPR in turn, splicing a landing-pad label plus
     the handler body in place of the CATCH_EXPR node.  */
  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      /* The handler body is lowered in the catch region, not the try
	 region: a throw inside the handler escapes this try.  */
      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      /* A handler that can complete normally jumps past the remaining
	 handlers to a shared exit label.  */
      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
1477
1478 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1479 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1480 region trees that record all the magic. */
1481
1482 static void
1483 lower_eh_filter (struct leh_state *state, tree *tp)
1484 {
1485 struct leh_state this_state;
1486 struct eh_region *this_region;
1487 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1488 tree eh_label;
1489
1490 if (EH_FILTER_MUST_NOT_THROW (inner))
1491 this_region = gen_eh_region_must_not_throw (state->cur_region);
1492 else
1493 this_region = gen_eh_region_allowed (state->cur_region,
1494 EH_FILTER_TYPES (inner));
1495 this_state = *state;
1496 this_state.cur_region = this_region;
1497
1498 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1499
1500 if (!get_eh_region_may_contain_throw (this_region))
1501 {
1502 *tp = TREE_OPERAND (*tp, 0);
1503 return;
1504 }
1505
1506 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1507 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1508
1509 eh_label = create_artificial_label ();
1510 set_eh_region_tree_label (this_region, eh_label);
1511
1512 frob_into_branch_around (tp, eh_label, NULL);
1513 }
1514
1515 /* Implement a cleanup expression. This is similar to try-finally,
1516 except that we only execute the cleanup block for exception edges. */
1517
static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing in the body can throw, the cleanup never runs;
     replace the whole construct with the body.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  /* This may clear fake_tf.may_throw if it consumed the cleanup.  */
  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
1579
1580 /* Main loop for lowering eh constructs. */
1581
static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      /* Recurse into both arms; the condition itself is gimple and
	 cannot contain EH constructs.  */
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  tree op;

	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);

	  /* ??? For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  op = get_call_expr_in (t);
	  if (op)
	    record_stmt_eh_region (state->cur_region, op);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Jumps that may leave an enclosing try-finally are queued so
	 the finally code can be interposed.  */
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      /* Switch labels may not jump out of a try-finally; verify.  */
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      /* The handler operand's first statement decides which kind of
	 construct this is: catch list, EH filter, or plain cleanup.  */
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  /* Lowering may have replaced a statement with a statement
	     list; splice it inline rather than leaving it nested.  */
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
1671
1672 static void
1673 lower_eh_constructs (void)
1674 {
1675 struct leh_state null_state;
1676 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1677
1678 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1679 throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
1680 ggc_free);
1681
1682 collect_finally_tree (*tp, NULL);
1683
1684 memset (&null_state, 0, sizeof (null_state));
1685 lower_eh_constructs_1 (&null_state, tp);
1686
1687 htab_delete (finally_tree);
1688
1689 collect_eh_region_array ();
1690 }
1691
/* Pass descriptor for EH lowering, registered with the tree pass
   manager.  Requires lowered control flow (PROP_gimple_lcf) and
   provides lowered EH (PROP_gimple_leh).  */
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  PROP_gimple_lcf,			/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
1708
1709 \f
1710 /* Construct EH edges for STMT. */
1711
1712 static void
1713 make_eh_edge (struct eh_region *region, void *data)
1714 {
1715 tree stmt, lab;
1716 basic_block src, dst;
1717
1718 stmt = data;
1719 lab = get_eh_region_tree_label (region);
1720
1721 src = bb_for_stmt (stmt);
1722 dst = label_to_block (lab);
1723
1724 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1725 }
1726
1727 void
1728 make_eh_edges (tree stmt)
1729 {
1730 int region_nr;
1731 bool is_resx;
1732
1733 if (TREE_CODE (stmt) == RESX_EXPR)
1734 {
1735 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1736 is_resx = true;
1737 }
1738 else
1739 {
1740 region_nr = lookup_stmt_eh_region (stmt);
1741 if (region_nr < 0)
1742 return;
1743 is_resx = false;
1744 }
1745
1746 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1747 }
1748
1749
1750 \f
1751 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1752 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1753 This routine expects only GIMPLE lhs or rhs input. */
1754
bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  /* For arithmetic and comparisons, precompute whether the operation's
     type makes NaNs, signaling NaNs, or trapping signed overflow a
     concern.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      /* These references trap iff their base object does; strip the
	 wrapper and re-examine.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
	 the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
	return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      /* An in-bounds access of a non-trapping base cannot trap.  */
      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      /* Dereferences trap unless explicitly marked otherwise.  */
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      /* Integer division traps when the divisor is not a nonzero
	 constant.  */
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* These comparisons trap only on signaling NaNs.  */
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
1897
1898 bool
1899 tree_could_throw_p (tree t)
1900 {
1901 if (!flag_exceptions)
1902 return false;
1903 if (TREE_CODE (t) == MODIFY_EXPR)
1904 {
1905 if (flag_non_call_exceptions
1906 && tree_could_trap_p (TREE_OPERAND (t, 0)))
1907 return true;
1908 t = TREE_OPERAND (t, 1);
1909 }
1910
1911 if (TREE_CODE (t) == WITH_SIZE_EXPR)
1912 t = TREE_OPERAND (t, 0);
1913 if (TREE_CODE (t) == CALL_EXPR)
1914 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
1915 if (flag_non_call_exceptions)
1916 return tree_could_trap_p (t);
1917 return false;
1918 }
1919
1920 bool
1921 tree_can_throw_internal (tree stmt)
1922 {
1923 int region_nr;
1924
1925 if (TREE_CODE (stmt) == RESX_EXPR)
1926 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1927 else
1928 region_nr = lookup_stmt_eh_region (stmt);
1929 if (region_nr < 0)
1930 return false;
1931 return can_throw_internal_1 (region_nr);
1932 }
1933
1934 bool
1935 tree_can_throw_external (tree stmt)
1936 {
1937 int region_nr = lookup_stmt_eh_region (stmt);
1938 if (region_nr < 0)
1939 return tree_could_throw_p (stmt);
1940 else
1941 return can_throw_external_1 (region_nr);
1942 }
1943
1944 bool
1945 maybe_clean_eh_stmt (tree stmt)
1946 {
1947 if (!tree_could_throw_p (stmt))
1948 if (remove_stmt_from_eh_region (stmt))
1949 return true;
1950 return false;
1951 }
1952
1953 #include "gt-tree-eh.h"