/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"

\f
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
\f
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
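  /* These pointers come from ggc_alloc or xmalloc and are therefore
     aligned, so the low-order bits carry little information; shift
     them away to spread the hash values.  */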
  return (size_t)*x >> 4;
}

\f
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

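/* Record that statement T belongs to EH region number NUM.  */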
void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

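/* Remove statement T from the EH table.  Return true if it was there.  */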
bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}

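/* Return the EH region number for statement T: -2 if no EH table has
   been built, -1 if T is not in the table, otherwise the region
   number.  */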
int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}


\f
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */
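/* For example (a sketch; labels are illustrative):

        try {                           <-- T
          if (p) goto L1;
          L0: ;
        } finally { stuff; }
        L1: ;

   L0 is recorded with parent T, so outside_finally_tree (L0, T)
   reaches T and returns false; walking up from L1 never reaches T,
   so outside_finally_tree (L1, T) returns true.  */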

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
\f
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

            int x;
            int foo (void)
            {
              x = 0;
              try {
                return x;
              } finally {
                x++;
              }
            }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.
   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
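/* A sketch (labels are illustrative): here the finally block ends in
   a noreturn call, so every escaping edge is funneled into a single
   label placed in front of it:

        try { body; goto L1; } finally { abort (); }
   becomes
        body; goto lab;
        lab: abort ();  */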

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
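/* A sketch (illustrative): when the only edge into the finally block
   comes from returns,

        try { body; return x; } finally { stuff; }
   becomes
        body; rettmp = x; goto finlab;
        finlab: stuff; return rettmp;  */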

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
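/* A sketch (illustrative): with a fallthru edge and one escaping goto,

        try { body; if (p) goto L1; } finally { stuff; }
   becomes
        body; if (p) goto lab1;
        stuff; goto fallthru_lab;
        lab1: stuff; goto L1;
        fallthru_lab: ;  */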

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          int index = q->index < 0 ? return_index : q->index;
          tree lab = labels[index];
          bool build_p = false;

          if (!lab)
            {
              labels[index] = lab = create_artificial_label ();
              build_p = true;
            }

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          if (build_p)
            {
              x = build1 (LABEL_EXPR, void_type_node, lab);
              append_to_statement_list (x, &new_stmt);

              x = lower_try_finally_dup_block (finally, state);
              lower_eh_constructs_1 (state, &x);
              append_to_statement_list (x, &new_stmt);

              append_to_statement_list (q->cont_stmt, &new_stmt);
              maybe_record_in_goto_queue (state, q->cont_stmt);
            }
        }
      replace_goto_queue (tf);
      free (labels);
    }

  /* The new statements must be linked in only after running
     replace_goto_queue, so that we do not process the same goto
     statements twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
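/* A sketch of the generated shape (names are illustrative).  Each
   incoming edge stores a distinct index in finally_tmp and jumps to
   finally_label; the switch then fans back out:

        body; finally_tmp = fallthru_index; goto finally_label;
        eh_label: finally_tmp = eh_index;
        finally_label:
        stuff;
        switch (finally_tmp)
          {
          case fallthru_index: goto fallthru_label;
          case eh_index: resx;
          default: goto original_destination;
          }  */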

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence in which
     the code is laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, fallthru_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, eh_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        {
          last_case = build (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, switch_id), NULL,
                             create_artificial_label ());
          TREE_VEC_ELT (case_label_vec, case_index) = last_case;

          x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
          append_to_statement_list (x, &switch_body);
          append_to_statement_list (q->cont_stmt, &switch_body);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* The switch statement must be linked in only after running
     replace_goto_queue, so that we do not process the same goto
     statements twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
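/* For example, with the estimates used below: a finally block of 9
   insns and 3 destinations costs (9 + 1) * 3 = 30 when copied versus
   10 + 2 * 3 = 16 for the switch; at -O2 we still copy, since 30 < 100.  */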

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          tree op;

          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);

          /* ??? For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          op = get_call_expr_in (t);
          if (op)
            record_stmt_eh_region (state->cur_region, op);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
                                      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  PROP_gimple_lcf,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

\f
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}


\f
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */
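/* Examples: "*p" and "a[i]" may trap (invalid pointer, index out of
   bounds); "x / y" may trap when Y is zero or not constant; "a + b"
   may trap only for floating point (with -ftrapping-math) or for
   trapping signed overflow.  */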

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base, idx;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
         the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      idx = TREE_OPERAND (expr, 1);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation && flag_trapping_math)
        return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}

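/* Return true if statement T could throw an exception.  */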
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

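/* Return true if STMT can throw to a handler within this function.  */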
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

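/* Return true if STMT can throw out of this function.  */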
bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

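/* If STMT can no longer throw, remove it from the EH table.  Return
   true if it was removed.  */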
bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"