/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"

\f
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
\f
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = a;
  return (size_t)*x >> 4;
}

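/* As a sketch of the intended use (assuming only the libiberty hashtab
   interface): a caller builds a table with

     htab_t table = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
     void **slot = htab_find_slot (table, node, INSERT);

   and can look up any record whose first field is the canonical pointer
   by passing a stack dummy with that field filled in.  The ">> 4" in
   the hash merely discards low-order bits that tend to be zero because
   of allocator alignment.  */
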
\f
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

struct throw_stmt_node GTY(())
{
  tree stmt;
  int region_nr;
};

static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;

/* Record that statement T belongs to REGION.  Does nothing if REGION
   is null.  */

static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
  struct throw_stmt_node *n;
  void **slot;

  if (!region)
    return;

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = get_eh_region_number (region);

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Record that statement T belongs to EH region number NUM.  */

void
add_stmt_to_eh_region (tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);

  n = ggc_alloc (sizeof (*n));
  n->stmt = t;
  n->region_nr = num;

  slot = htab_find_slot (throw_stmt_table, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Remove statement T from the EH region table.  Return true if an
   entry was found and removed.  */

bool
remove_stmt_from_eh_region (tree t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!throw_stmt_table)
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
  if (slot)
    {
      htab_clear_slot (throw_stmt_table, slot);
      return true;
    }
  else
    return false;
}

/* Return the EH region number for statement T, -1 if it is not in
   the table, or -2 if no table exists at all.  */

int
lookup_stmt_eh_region (tree t)
{
  struct throw_stmt_node *p, n;

  if (!throw_stmt_table)
    return -2;

  n.stmt = t;
  p = htab_find (throw_stmt_table, &n);

  return (p ? p->region_nr : -1);
}

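/* A usage sketch for the lookup above: a caller distinguishes the
   three possible results,

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)
       ;  /* STMT lives in EH region RN.  */

   where -1 means STMT is not in the table and -2 means no table
   exists, i.e. EH lowering has not been run for this function.  */
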

\f
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (tree child, tree parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = xmalloc (sizeof (*n));
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
          collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

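/* To illustrate what gets recorded: for source of the shape

     try {                  // TF1
       lab1:
       try {                // TF2
         lab2: ;
       } finally { ... }
     } finally { ... }

   the table maps lab2 -> TF2, TF2 -> TF1, lab1 -> TF1, and TF1 -> NULL;
   that is, every label and every TRY_FINALLY_EXPR points at the
   innermost TRY_FINALLY_EXPR enclosing it.  */
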
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (tree start, tree target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = htab_find (finally_tree, &n);
      if (!p)
        return true;
      start = p->parent;
    }
  while (start != target);

  return false;
}
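
/* Continuing the sketch above: outside_finally_tree (lab2, TF1) walks
   lab2 -> TF2 -> TF1 and returns false (the jump does not leave TF1),
   whereas a destination label recorded outside any try_finally has no
   entry at all, so the very first lookup fails and we return true.  */
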
\f
/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;
    tree repl_stmt;
    tree cont_stmt;
    int index;
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  varray_type dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);

/* Comparison function for qsort/bsearch.  We're interested in
   searching goto queue elements for source statements.  */

static int
goto_queue_cmp (const void *x, const void *y)
{
  tree a = ((const struct goto_queue_node *)x)->stmt;
  tree b = ((const struct goto_queue_node *)y)->stmt;
  return (a == b ? 0 : a < b ? -1 : 1);
}

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
  struct goto_queue_node tmp, *ret;
  tmp.stmt = stmt;
  ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
                 sizeof (struct goto_queue_node), goto_queue_cmp);
  return (ret ? ret->repl_stmt : NULL);
}
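
/* Note that the bsearch above relies on the goto queue having been
   sorted with goto_queue_cmp beforehand; lower_try_finally performs
   that qsort once all entries are collected, so each lookup here is
   O(log n) rather than a linear scan.  */
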

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the COND_EXPR and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                tree_stmt_iterator *tsi)
{
  tree new, one, label;

  new = find_goto_replacement (tf, *tp);
  if (!new)
    return;

  one = expr_only (new);
  if (one && TREE_CODE (one) == GOTO_EXPR)
    {
      *tp = one;
      return;
    }

  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));

  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
        {
          tsi_link_before (tsi, t, TSI_SAME_STMT);
          tsi_delink (tsi);
          return;
        }
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}

/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */

static void
replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
{
  tree_stmt_iterator i = tsi_start (t);
  while (!tsi_end_p (i))
    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (*tf->top_p, tf);
}

/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
        tree lab = GOTO_DESTINATION (stmt);

        /* Computed and non-local gotos do not get processed.  Given
           their nature we can neither tell whether we've escaped the
           finally block nor redirect them if we knew.  */
        if (TREE_CODE (lab) != LABEL_DECL)
          return;

        /* No need to record gotos that don't leave the try block.  */
        if (! outside_finally_tree (lab, tf->try_finally_expr))
          return;

        if (! tf->dest_array)
          {
            VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
            VARRAY_PUSH_TREE (tf->dest_array, lab);
            index = 0;
          }
        else
          {
            int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
            for (index = 0; index < n; ++index)
              if (VARRAY_TREE (tf->dest_array, index) == lab)
                break;
            if (index == n)
              VARRAY_PUSH_TREE (tf->dest_array, lab);
          }
      }
      break;

    case RETURN_EXPR:
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}

#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif

/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
                       tree *return_value_p)
{
  tree ret_expr = TREE_OPERAND (q->stmt, 0);
  tree x;

  if (ret_expr)
    {
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

              int x;
              int foo (void)
              {
                x = 0;
                try {
                  return x;
                } finally {
                  x++;
                }
              }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      switch (TREE_CODE (ret_expr))
        {
        case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt;
          break;

        case MODIFY_EXPR:
          {
            tree result = TREE_OPERAND (ret_expr, 0);
            tree new, old = TREE_OPERAND (ret_expr, 1);

            if (!*return_value_p)
              {
                if (aggregate_value_p (TREE_TYPE (result),
                                       TREE_TYPE (current_function_decl)))
                  /* If this function returns in memory, copy the argument
                     into the return slot now.  Otherwise, we might need to
                     worry about magic return semantics, so we need to use a
                     temporary to hold the value until we're actually ready
                     to return.  */
                  new = result;
                else
                  new = create_tmp_var (TREE_TYPE (old), "rettmp");
                *return_value_p = new;
              }
            else
              new = *return_value_p;

            x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
            append_to_statement_list (x, &q->repl_stmt);

            if (new == result)
              x = result;
            else
              x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
            q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
          }
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* If we don't return a value, all return statements are the same.  */
      q->cont_stmt = q->stmt;
    }

  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* Similar, but easier, for GOTO_EXPR.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
{
  tree x;

  q->cont_stmt = q->stmt;
  if (mod)
    append_to_statement_list (mod, &q->repl_stmt);

  x = build1 (GOTO_EXPR, void_type_node, finlab);
  append_to_statement_list (x, &q->repl_stmt);
}

/* We want to transform
        try { body; } catch { stuff; }
   to
        body; goto over; lab: stuff; over:

   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static void
frob_into_branch_around (tree *tp, tree lab, tree over)
{
  tree x, op1;

  op1 = TREE_OPERAND (*tp, 1);
  *tp = TREE_OPERAND (*tp, 0);

  if (block_may_fallthru (*tp))
    {
      if (!over)
        over = create_artificial_label ();
      x = build1 (GOTO_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }

  if (lab)
    {
      x = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (x, tp);
    }

  append_to_statement_list (op1, tp);

  if (over)
    {
      x = build1 (LABEL_EXPR, void_type_node, over);
      append_to_statement_list (x, tp);
    }
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static tree
lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
{
  tree region = NULL;

  t = unsave_expr_now (t);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree (t, region);

  return t;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  if (!label)
    {
      label = create_artificial_label ();
      tf->fallthru_label = label;
      if (tf->outer->tf)
        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
    }
  return label;
}

/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      i = tsi_start (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      i = tsi_last (finally);
      x = build (EXC_PTR_EXPR, ptr_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build (FILTER_EXPR, integer_type_node);
      x = build (MODIFY_EXPR, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}

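/* In outline, when the finally copy may fall through, the statements
   built above bracket it like so:

     save_eptr = <eh exception pointer>;
     save_filt = <eh filter value>;
     ... finally block ...
     <eh exception pointer> = save_eptr;
     <eh filter value> = save_filt;
     resx <region number>;

   which is what keeps a nested throw/catch inside the cleanup from
   clobbering the data of the exception being propagated.  */
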
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index;
      tree *labels;

      if (tf->dest_array)
        return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
      else
        return_index = 0;
      labels = xcalloc (sizeof (tree), return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          int index = q->index < 0 ? return_index : q->index;
          tree lab = labels[index];
          bool build_p = false;

          if (!lab)
            {
              labels[index] = lab = create_artificial_label ();
              build_p = true;
            }

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL);

          if (build_p)
            {
              x = build1 (LABEL_EXPR, void_type_node, lab);
              append_to_statement_list (x, &new_stmt);

              x = lower_try_finally_dup_block (finally, state);
              lower_eh_constructs_1 (state, &x);
              append_to_statement_list (x, &new_stmt);

              append_to_statement_list (q->cont_stmt, &new_stmt);
              maybe_record_in_goto_queue (state, q->cont_stmt);
            }
        }
      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  if (tf->dest_array)
    nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
  else
    nlabels = 0;
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
                       NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      if (tf->may_throw)
        {
          x = build1 (GOTO_EXPR, void_type_node, finally_label);
          append_to_statement_list (x, tf->top_p);
        }

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, fallthru_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build (MODIFY_EXPR, void_type_node, finally_tmp,
                 build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build (CASE_LABEL_EXPR, void_type_node,
                         build_int_cst (NULL_TREE, eh_index), NULL,
                         create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build1 (RESX_EXPR, void_type_node,
                  build_int_cst (NULL_TREE,
                                 get_eh_region_number (tf->region)));
      append_to_statement_list (x, &switch_body);
    }

  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  last_case_index += nlabels;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, return_index));
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
                       build_int_cst (NULL_TREE, q->index));
          do_goto_redirection (q, finally_label, mod);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
        {
          last_case = build (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, switch_id), NULL,
                             create_artificial_label ());
          TREE_VEC_ELT (case_label_vec, case_index) = last_case;

          x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
          append_to_statement_list (x, &switch_body);
          append_to_statement_list (q->cont_stmt, &switch_body);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
    }
  replace_goto_queue (tf);
  last_case_index += nlabels;

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}

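/* Schematically, with label and temporary names invented for the
   illustration, input of the form

     try {
       if (p) goto out;
       body;
     } finally {
       fin;
     }
     out: ...

   comes out of the switch strategy roughly as

     if (p) { finally_tmp = 0; goto finally_label; }
     body;
     finally_tmp = 1;
     finally_label:
     fin;
     switch (finally_tmp)
       {
       case 0: goto out;
       case 1: goto fallthru_label;
       }
     fallthru_label: ...

   one copy of FIN, with each incoming edge recording its destination
   index beforehand (and one case subsequently turned into the default
   label, as GIMPLE requires one).  */
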
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, tree finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = estimate_num_insns (finally);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_size)
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

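/* Working the numbers once: a finally block estimated at 15 insns with
   3 destinations gives f_estimate = (15 + 1) * 3 = 48 and
   sw_estimate = 10 + 2 * 3 = 16.  At -O2 the test "48 < 100" passes,
   so the block is copied; at -Os the test "48 < 16" fails, so the
   switch machinery is emitted instead.  */
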
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
           sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  if (this_tf.dest_array)
    ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
  else
    ndests = 0;
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}

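/* To make the dispatch above concrete: for

     try { if (p) return 1; body; } finally { fin; }

   in a function using EH for cleanups, dest_array stays empty while
   may_return, may_fallthru, and (typically) may_throw are all set, so
   ndests == 3 and the copy-versus-switch decision applies.  Had the
   finally block been unreachable (ndests == 0), the TRY_FINALLY_EXPR
   would simply have been replaced by its try body.  */
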
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      if (block_may_fallthru (CATCH_BODY (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();

          x = build1 (GOTO_EXPR, void_type_node, out_label);
          append_to_statement_list (x, &CATCH_BODY (catch));
        }

      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}

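/* For illustration, a try block with two handlers

     try { body; } catch (A) { ha; } catch (B) { hb; }

   lowers to approximately (labels invented here)

     body;
     goto over;
     eh_label_A: ha; goto over;
     eh_label_B: hb; goto over;
     over: ...

   with one catch region per handler, each labeled by its eh_label,
   so the runtime dispatch selects which label to enter.  */
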
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with an
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}

/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
        {
          tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
          append_to_statement_list (x, tp);
        }
    }
}

/* Main loop for lowering eh constructs.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case MODIFY_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
        {
          tree op;

          record_stmt_eh_region (state->cur_region, t);
          note_eh_region_may_contain_throw (state->cur_region);

          /* ??? For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          op = get_call_expr_in (t);
          if (op)
            record_stmt_eh_region (state->cur_region, op);
        }
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
        {
        case CATCH_EXPR:
          lower_catch (state, tp);
          break;
        case EH_FILTER_EXPR:
          lower_eh_filter (state, tp);
          break;
        default:
          lower_cleanup (state, tp);
          break;
        }
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
        {
          lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
          t = tsi_stmt (i);
          if (TREE_CODE (t) == STATEMENT_LIST)
            {
              tsi_link_before (&i, t, TSI_SAME_STMT);
              tsi_delink (&i);
            }
          else
            tsi_next (&i);
        }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}

static void
lower_eh_constructs (void)
{
  struct leh_state null_state;
  tree *tp = &DECL_SAVED_TREE (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
                                      ggc_free);

  collect_finally_tree (*tp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, tp);

  htab_delete (finally_tree);

  collect_eh_region_array ();
}

struct tree_opt_pass pass_lower_eh =
{
  "eh",                                 /* name */
  NULL,                                 /* gate */
  lower_eh_constructs,                  /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  PROP_gimple_lcf,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

\f
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region *region, void *data)
{
  tree stmt, lab;
  basic_block src, dst;

  stmt = data;
  lab = get_eh_region_tree_label (region);

  src = bb_for_stmt (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}

void
make_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;

  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
    }

  foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}


\f
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
        honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case WITH_SIZE_EXPR:
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Let us be conservative here for now.  We might be checking bounds of
         the access similarly to the case below.  */
      if (!TREE_THIS_NOTRAP (expr))
        return true;

      base = TREE_OPERAND (expr, 0);
      return tree_could_trap_p (base);

    case ARRAY_REF:
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
        return true;

      if (TREE_THIS_NOTRAP (expr))
        return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation && flag_trapping_math)
        return true;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      return false;
    }
}

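/* Some concrete cases of the predicate above: with -ftrapping-math,
   a floating-point division "a / b" traps (RDIV_EXPR); "*p" traps
   unless TREE_THIS_NOTRAP is set on the INDIRECT_REF; "arr[2]" with a
   provably in-bounds subscript does not trap; and a call to a weak
   function is conservatively assumed to trap, since the symbol may
   resolve to no definition at all.  */
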
bool
tree_could_throw_p (tree t)
{
  if (!flag_exceptions)
    return false;
  if (TREE_CODE (t) == MODIFY_EXPR)
    {
      if (flag_non_call_exceptions
          && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
      t = TREE_OPERAND (t, 1);
    }

  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return (call_expr_flags (t) & ECF_NOTHROW) == 0;
  if (flag_non_call_exceptions)
    return tree_could_trap_p (t);
  return false;
}

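/* E.g. a CALL_EXPR to a function not marked nothrow could throw, and
   with -fnon-call-exceptions a MODIFY_EXPR whose left-hand side is a
   trapping reference like "*p" could throw as well, which is why the
   store is checked with tree_could_trap_p above.  */
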
bool
tree_can_throw_internal (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_internal_1 (region_nr);
}

bool
tree_can_throw_external (tree stmt)
{
  int region_nr = lookup_stmt_eh_region (stmt);
  if (region_nr < 0)
    return false;
  return can_throw_external_1 (region_nr);
}

bool
maybe_clean_eh_stmt (tree stmt)
{
  if (!tree_could_throw_p (stmt))
    if (remove_stmt_from_eh_region (stmt))
      return true;
  return false;
}

#include "gt-tree-eh.h"