1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39
40 \f
41 /* Nonzero if we are using EH to handle cleanups. */
42 static int using_eh_for_cleanups_p = 0;
43
44 void
45 using_eh_for_cleanups (void)
46 {
47 using_eh_for_cleanups_p = 1;
48 }
49 \f
50 /* Misc functions used in this file. */
51
52 /* Compare and hash for any structure which begins with a canonical
53 pointer. Assumes all pointers are interchangeable, which is sort
54 of already assumed by gcc elsewhere IIRC. */
55
56 static int
57 struct_ptr_eq (const void *a, const void *b)
58 {
59 const void * const * x = a;
60 const void * const * y = b;
61 return *x == *y;
62 }
63
64 static hashval_t
65 struct_ptr_hash (const void *a)
66 {
67 const void * const * x = a;
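/* Discard the low-order bits, which are usually zero because of
pointer alignment, to get a better hash distribution. */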
68 return (size_t)*x >> 4;
69 }
70
71 \f
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
75
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
79
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
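/* A sketch of how the table is used (illustrative only):

     add_stmt_to_eh_region (stmt, get_eh_region_number (region));
     ...
     if (lookup_stmt_eh_region (stmt) >= 0)
       ... STMT may throw; the region recorded above is found again ...  */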
83
84 static void
85 record_stmt_eh_region (struct eh_region *region, tree t)
86 {
87 if (!region)
88 return;
89
90 add_stmt_to_eh_region (t, get_eh_region_number (region));
91 }
92
93 void
94 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
95 {
96 struct throw_stmt_node *n;
97 void **slot;
98
99 gcc_assert (num >= 0);
100 gcc_assert (TREE_CODE (t) != RESX_EXPR);
101
102 n = ggc_alloc (sizeof (*n));
103 n->stmt = t;
104 n->region_nr = num;
105
106 if (!get_eh_throw_stmt_table (ifun))
107 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
108 struct_ptr_eq,
109 ggc_free));
110
111 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
112 gcc_assert (!*slot);
113 *slot = n;
114 /* ??? For the benefit of calls.c, converting all this to rtl,
115 we need to record the call expression, not just the outer
116 modify statement. */
117 if (TREE_CODE (t) == MODIFY_EXPR
118 && (t = get_call_expr_in (t)))
119 add_stmt_to_eh_region_fn (ifun, t, num);
120 }
121
122 void
123 add_stmt_to_eh_region (tree t, int num)
124 {
125 add_stmt_to_eh_region_fn (cfun, t, num);
126 }
127
128 bool
129 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
130 {
131 struct throw_stmt_node dummy;
132 void **slot;
133
134 if (!get_eh_throw_stmt_table (ifun))
135 return false;
136
137 dummy.stmt = t;
138 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
139 NO_INSERT);
140 if (slot)
141 {
142 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
143 /* ??? For the benefit of calls.c, converting all this to rtl,
144 we need to record the call expression, not just the outer
145 modify statement. */
146 if (TREE_CODE (t) == MODIFY_EXPR
147 && (t = get_call_expr_in (t)))
148 remove_stmt_from_eh_region_fn (ifun, t);
149 return true;
150 }
151 else
152 return false;
153 }
154
155 bool
156 remove_stmt_from_eh_region (tree t)
157 {
158 return remove_stmt_from_eh_region_fn (cfun, t);
159 }
160
161 int
162 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
163 {
164 struct throw_stmt_node *p, n;
165
166 if (!get_eh_throw_stmt_table (ifun))
167 return -2;
168
169 n.stmt = t;
170 p = htab_find (get_eh_throw_stmt_table (ifun), &n);
171
172 return (p ? p->region_nr : -1);
173 }
174
175 int
176 lookup_stmt_eh_region (tree t)
177 {
178 /* We can get called from initialized data when -fnon-call-exceptions
179 is on; prevent crash. */
180 if (!cfun)
181 return -1;
182 return lookup_stmt_eh_region_fn (cfun, t);
183 }
184
185 \f
186 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
187 nodes and LABEL_DECL nodes. We will use this during the second phase to
188 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
189
190 struct finally_tree_node
191 {
192 tree child, parent;
193 };
194
195 /* Note that this table is *not* marked GTY. It is short-lived. */
196 static htab_t finally_tree;
197
198 static void
199 record_in_finally_tree (tree child, tree parent)
200 {
201 struct finally_tree_node *n;
202 void **slot;
203
204 n = xmalloc (sizeof (*n));
205 n->child = child;
206 n->parent = parent;
207
208 slot = htab_find_slot (finally_tree, n, INSERT);
209 gcc_assert (!*slot);
210 *slot = n;
211 }
212
213 static void
214 collect_finally_tree (tree t, tree region)
215 {
216 tailrecurse:
217 switch (TREE_CODE (t))
218 {
219 case LABEL_EXPR:
220 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
221 break;
222
223 case TRY_FINALLY_EXPR:
224 record_in_finally_tree (t, region);
225 collect_finally_tree (TREE_OPERAND (t, 0), t);
226 t = TREE_OPERAND (t, 1);
227 goto tailrecurse;
228
229 case TRY_CATCH_EXPR:
230 collect_finally_tree (TREE_OPERAND (t, 0), region);
231 t = TREE_OPERAND (t, 1);
232 goto tailrecurse;
233
234 case CATCH_EXPR:
235 t = CATCH_BODY (t);
236 goto tailrecurse;
237
238 case EH_FILTER_EXPR:
239 t = EH_FILTER_FAILURE (t);
240 goto tailrecurse;
241
242 case STATEMENT_LIST:
243 {
244 tree_stmt_iterator i;
245 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
246 collect_finally_tree (tsi_stmt (i), region);
247 }
248 break;
249
250 default:
251 /* A type, a decl, or some kind of statement that we're not
252 interested in. Don't walk them. */
253 break;
254 }
255 }
256
257 /* Use the finally tree to determine if a jump from START to TARGET
258 would leave the try_finally node that START lives in. */
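/* For illustration (labels are made up):

     try { ... goto inner; ... inner: ... goto outer; ... } finally { ... }
     outer: ...

   outside_finally_tree (outer, <the TRY_FINALLY_EXPR>) returns true,
   since OUTER is not recorded under that node in the finally tree,
   while outside_finally_tree (inner, <the TRY_FINALLY_EXPR>) returns
   false. */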
259
260 static bool
261 outside_finally_tree (tree start, tree target)
262 {
263 struct finally_tree_node n, *p;
264
265 do
266 {
267 n.child = start;
268 p = htab_find (finally_tree, &n);
269 if (!p)
270 return true;
271 start = p->parent;
272 }
273 while (start != target);
274
275 return false;
276 }
277 \f
278 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
279 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
280 The eh region creation is straightforward, but frobbing all the gotos
281 and such into shape isn't. */
282
283 /* State of the world while lowering. */
284
285 struct leh_state
286 {
287 /* What's "current" while constructing the eh region tree. These
288 correspond to variables of the same name in cfun->eh, which we
289 don't have easy access to. */
290 struct eh_region *cur_region;
291 struct eh_region *prev_try;
292
293 /* Processing of TRY_FINALLY requires a bit more state. This is
294 split out into a separate structure so that we don't have to
295 copy so much when processing other nodes. */
296 struct leh_tf_state *tf;
297 };
298
299 struct leh_tf_state
300 {
301 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
302 is the original TRY_FINALLY_EXPR. We need to retain this so that
303 outside_finally_tree can reliably reference the tree used in the
304 collect_finally_tree data structures. */
305 tree try_finally_expr;
306 tree *top_p;
307
308 /* The state outside this try_finally node. */
309 struct leh_state *outer;
310
311 /* The exception region created for it. */
312 struct eh_region *region;
313
314 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
315 that are seen to escape this TRY_FINALLY_EXPR node. */
316 struct goto_queue_node {
317 tree stmt;
318 tree repl_stmt;
319 tree cont_stmt;
320 int index;
321 } *goto_queue;
322 size_t goto_queue_size;
323 size_t goto_queue_active;
324
325 /* The set of unique labels seen as entries in the goto queue. */
326 VEC(tree,heap) *dest_array;
327
328 /* A label to be added at the end of the completed transformed
329 sequence. It will be set if may_fallthru was true *at one time*,
330 though subsequent transformations may have cleared that flag. */
331 tree fallthru_label;
332
333 /* A label that has been registered with except.c to be the
334 landing pad for this try block. */
335 tree eh_label;
336
337 /* True if it is possible to fall out the bottom of the try block.
338 Cleared if the fallthru is converted to a goto. */
339 bool may_fallthru;
340
341 /* True if any entry in goto_queue is a RETURN_EXPR. */
342 bool may_return;
343
344 /* True if the finally block can receive an exception edge.
345 Cleared if the exception case is handled by code duplication. */
346 bool may_throw;
347 };
348
349 static void lower_eh_filter (struct leh_state *, tree *);
350 static void lower_eh_constructs_1 (struct leh_state *, tree *);
351
352 /* Comparison function for qsort/bsearch. We're interested in
353 searching goto queue elements for source statements. */
354
355 static int
356 goto_queue_cmp (const void *x, const void *y)
357 {
358 tree a = ((const struct goto_queue_node *)x)->stmt;
359 tree b = ((const struct goto_queue_node *)y)->stmt;
360 return (a == b ? 0 : a < b ? -1 : 1);
361 }
362
363 /* Search for STMT in the goto queue. Return the replacement,
364 or null if the statement isn't in the queue. */
365
366 static tree
367 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
368 {
369 struct goto_queue_node tmp, *ret;
370 tmp.stmt = stmt;
371 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
372 sizeof (struct goto_queue_node), goto_queue_cmp);
373 return (ret ? ret->repl_stmt : NULL);
374 }
375
376 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
377 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
378 then we can just splat it in, otherwise we add the new stmts immediately
379 after the COND_EXPR and redirect. */
380
381 static void
382 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
383 tree_stmt_iterator *tsi)
384 {
385 tree new, one, label;
386
387 new = find_goto_replacement (tf, *tp);
388 if (!new)
389 return;
390
391 one = expr_only (new);
392 if (one && TREE_CODE (one) == GOTO_EXPR)
393 {
394 *tp = one;
395 return;
396 }
397
398 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
399 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
400
401 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
402 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
403 }
404
405 /* The real work of replace_goto_queue. Returns with TSI updated to
406 point to the next statement. */
407
408 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
409
410 static void
411 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
412 {
413 switch (TREE_CODE (t))
414 {
415 case GOTO_EXPR:
416 case RETURN_EXPR:
417 t = find_goto_replacement (tf, t);
418 if (t)
419 {
420 tsi_link_before (tsi, t, TSI_SAME_STMT);
421 tsi_delink (tsi);
422 return;
423 }
424 break;
425
426 case COND_EXPR:
427 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
428 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
429 break;
430
431 case TRY_FINALLY_EXPR:
432 case TRY_CATCH_EXPR:
433 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
434 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
435 break;
436 case CATCH_EXPR:
437 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
438 break;
439 case EH_FILTER_EXPR:
440 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
441 break;
442
443 case STATEMENT_LIST:
444 gcc_unreachable ();
445
446 default:
447 /* These won't have gotos in them. */
448 break;
449 }
450
451 tsi_next (tsi);
452 }
453
454 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
455
456 static void
457 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
458 {
459 tree_stmt_iterator i = tsi_start (t);
460 while (!tsi_end_p (i))
461 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
462 }
463
464 /* Replace all goto queue members. */
465
466 static void
467 replace_goto_queue (struct leh_tf_state *tf)
468 {
469 if (tf->goto_queue_active == 0)
470 return;
471 replace_goto_queue_stmt_list (*tf->top_p, tf);
472 }
473
474 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
475 node, and if so record that fact in the goto queue associated with that
476 try_finally node. */
477
478 static void
479 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
480 {
481 struct leh_tf_state *tf = state->tf;
482 struct goto_queue_node *q;
483 size_t active, size;
484 int index;
485
486 if (!tf)
487 return;
488
489 switch (TREE_CODE (stmt))
490 {
491 case GOTO_EXPR:
492 {
493 tree lab = GOTO_DESTINATION (stmt);
494
495 /* Computed and non-local gotos do not get processed. Given
496 their nature we can neither tell whether we've escaped the
497 finally block nor redirect them if we knew. */
498 if (TREE_CODE (lab) != LABEL_DECL)
499 return;
500
501 /* No need to record gotos that don't leave the try block. */
502 if (! outside_finally_tree (lab, tf->try_finally_expr))
503 return;
504
505 if (! tf->dest_array)
506 {
507 tf->dest_array = VEC_alloc (tree, heap, 10);
508 VEC_quick_push (tree, tf->dest_array, lab);
509 index = 0;
510 }
511 else
512 {
513 int n = VEC_length (tree, tf->dest_array);
514 for (index = 0; index < n; ++index)
515 if (VEC_index (tree, tf->dest_array, index) == lab)
516 break;
517 if (index == n)
518 VEC_safe_push (tree, heap, tf->dest_array, lab);
519 }
520 }
521 break;
522
523 case RETURN_EXPR:
524 tf->may_return = true;
525 index = -1;
526 break;
527
528 default:
529 gcc_unreachable ();
530 }
531
532 active = tf->goto_queue_active;
533 size = tf->goto_queue_size;
534 if (active >= size)
535 {
536 size = (size ? size * 2 : 32);
537 tf->goto_queue_size = size;
538 tf->goto_queue
539 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
540 }
541
542 q = &tf->goto_queue[active];
543 tf->goto_queue_active = active + 1;
544
545 memset (q, 0, sizeof (*q));
546 q->stmt = stmt;
547 q->index = index;
548 }
549
550 #ifdef ENABLE_CHECKING
551 /* We do not process SWITCH_EXPRs for now. As long as the original source
552 was in fact structured, and we've not yet done jump threading, then none
553 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
554
555 static void
556 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
557 {
558 struct leh_tf_state *tf = state->tf;
559 size_t i, n;
560 tree vec;
561
562 if (!tf)
563 return;
564
565 vec = SWITCH_LABELS (switch_expr);
566 n = TREE_VEC_LENGTH (vec);
567
568 for (i = 0; i < n; ++i)
569 {
570 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
571 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
572 }
573 }
574 #else
575 #define verify_norecord_switch_expr(state, switch_expr)
576 #endif
577
578 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
579 whatever is needed to finish the return. If MOD is non-null, insert it
580 before the new branch. RETURN_VALUE_P is a cache containing a temporary
581 variable to be used in manipulating the value returned from the function. */
582
583 static void
584 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
585 tree *return_value_p)
586 {
587 tree ret_expr = TREE_OPERAND (q->stmt, 0);
588 tree x;
589
590 if (ret_expr)
591 {
592 /* The nasty part about redirecting the return value is that the
593 return value itself is to be computed before the FINALLY block
594 is executed. e.g.
595
596 int x;
597 int foo (void)
598 {
599 x = 0;
600 try {
601 return x;
602 } finally {
603 x++;
604 }
605 }
606
607 should return 0, not 1. Arrange for this to happen by copying the
608 computed return value into a local temporary. This also
609 allows us to redirect multiple return statements through the
610 same destination block; whether this is a net win or not really
611 depends, I guess, but it does make generation of the switch in
612 lower_try_finally_switch easier. */
613
614 switch (TREE_CODE (ret_expr))
615 {
616 case RESULT_DECL:
617 if (!*return_value_p)
618 *return_value_p = ret_expr;
619 else
620 gcc_assert (*return_value_p == ret_expr);
621 q->cont_stmt = q->stmt;
622 break;
623
624 case MODIFY_EXPR:
625 {
626 tree result = TREE_OPERAND (ret_expr, 0);
627 tree new, old = TREE_OPERAND (ret_expr, 1);
628
629 if (!*return_value_p)
630 {
631 if (aggregate_value_p (TREE_TYPE (result),
632 TREE_TYPE (current_function_decl)))
633 /* If this function returns in memory, copy the argument
634 into the return slot now. Otherwise, we might need to
635 worry about magic return semantics, so we need to use a
636 temporary to hold the value until we're actually ready
637 to return. */
638 new = result;
639 else
640 new = create_tmp_var (TREE_TYPE (old), "rettmp");
641 *return_value_p = new;
642 }
643 else
644 new = *return_value_p;
645
646 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
647 append_to_statement_list (x, &q->repl_stmt);
648
649 if (new == result)
650 x = result;
651 else
652 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
653 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
654 }
break;
655
656 default:
657 gcc_unreachable ();
658 }
659 }
660 else
661 {
662 /* If we don't return a value, all return statements are the same. */
663 q->cont_stmt = q->stmt;
664 }
665
666 if (mod)
667 append_to_statement_list (mod, &q->repl_stmt);
668
669 x = build1 (GOTO_EXPR, void_type_node, finlab);
670 append_to_statement_list (x, &q->repl_stmt);
671 }
672
673 /* Similar, but easier, for GOTO_EXPR. */
674
675 static void
676 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
677 {
678 tree x;
679
680 q->cont_stmt = q->stmt;
681 if (mod)
682 append_to_statement_list (mod, &q->repl_stmt);
683
684 x = build1 (GOTO_EXPR, void_type_node, finlab);
685 append_to_statement_list (x, &q->repl_stmt);
686 }
687
688 /* We want to transform
689 try { body; } catch { stuff; }
690 to
691 body; goto over; lab: stuff; over:
692
693 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
694 should be placed before the second operand, or NULL. OVER is
695 an existing label that should be put at the exit, or NULL. */
696
697 static void
698 frob_into_branch_around (tree *tp, tree lab, tree over)
699 {
700 tree x, op1;
701
702 op1 = TREE_OPERAND (*tp, 1);
703 *tp = TREE_OPERAND (*tp, 0);
704
705 if (block_may_fallthru (*tp))
706 {
707 if (!over)
708 over = create_artificial_label ();
709 x = build1 (GOTO_EXPR, void_type_node, over);
710 append_to_statement_list (x, tp);
711 }
712
713 if (lab)
714 {
715 x = build1 (LABEL_EXPR, void_type_node, lab);
716 append_to_statement_list (x, tp);
717 }
718
719 append_to_statement_list (op1, tp);
720
721 if (over)
722 {
723 x = build1 (LABEL_EXPR, void_type_node, over);
724 append_to_statement_list (x, tp);
725 }
726 }
727
728 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
729 Make sure to record all new labels found. */
730
731 static tree
732 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
733 {
734 tree region = NULL;
735
736 t = unsave_expr_now (t);
737
738 if (outer_state->tf)
739 region = outer_state->tf->try_finally_expr;
740 collect_finally_tree (t, region);
741
742 return t;
743 }
744
745 /* A subroutine of lower_try_finally. Create a fallthru label for
746 the given try_finally state. The only tricky bit here is that
747 we have to make sure to record the label in our outer context. */
748
749 static tree
750 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
751 {
752 tree label = tf->fallthru_label;
753 if (!label)
754 {
755 label = create_artificial_label ();
756 tf->fallthru_label = label;
757 if (tf->outer->tf)
758 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
759 }
760 return label;
761 }
762
763 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
764 returns non-null, then the language requires that the exception path out
765 of a try_finally be treated specially. To wit: the code within the
766 finally block may not itself throw an exception. We have two choices here.
767 First we can duplicate the finally block and wrap it in a must_not_throw
768 region. Second, we can generate code like
769
770 try {
771 finally_block;
772 } catch {
773 if (fintmp == eh_edge)
774 protect_cleanup_actions;
775 }
776
777 where "fintmp" is the temporary used in the switch statement generation
778 alternative considered below. For the nonce, we always choose the first
779 option.
780
781 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
782
783 static void
784 honor_protect_cleanup_actions (struct leh_state *outer_state,
785 struct leh_state *this_state,
786 struct leh_tf_state *tf)
787 {
788 tree protect_cleanup_actions, finally, x;
789 tree_stmt_iterator i;
790 bool finally_may_fallthru;
791
792 /* First check for nothing to do. */
793 if (lang_protect_cleanup_actions)
794 protect_cleanup_actions = lang_protect_cleanup_actions ();
795 else
796 protect_cleanup_actions = NULL;
797
798 finally = TREE_OPERAND (*tf->top_p, 1);
799
800 /* If the EH case of the finally block can fall through, this may be a
801 structure of the form
802 try {
803 try {
804 throw ...;
805 } cleanup {
806 try {
807 throw ...;
808 } catch (...) {
809 }
810 }
811 } catch (...) {
812 yyy;
813 }
814 E.g. with an inline destructor with an embedded try block. In this
815 case we must save the runtime EH data around the nested exception.
816
817 This complication means that any time the previous runtime data might
818 be used (via fallthru from the finally) we handle the eh case here,
819 whether or not protect_cleanup_actions is active. */
820
821 finally_may_fallthru = block_may_fallthru (finally);
822 if (!finally_may_fallthru && !protect_cleanup_actions)
823 return;
824
825 /* Duplicate the FINALLY block. Only need to do this for try-finally,
826 and not for cleanups. */
827 if (this_state)
828 finally = lower_try_finally_dup_block (finally, outer_state);
829
830 /* Resume execution after the exception. Adding this now lets
831 lower_eh_filter not add unnecessary gotos, as it is clear that
832 we never fallthru from this copy of the finally block. */
833 if (finally_may_fallthru)
834 {
835 tree save_eptr, save_filt;
836
837 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
838 save_filt = create_tmp_var (integer_type_node, "save_filt");
839
840 i = tsi_start (finally);
841 x = build (EXC_PTR_EXPR, ptr_type_node);
842 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
843 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
844
845 x = build (FILTER_EXPR, integer_type_node);
846 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
847 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
848
849 i = tsi_last (finally);
850 x = build (EXC_PTR_EXPR, ptr_type_node);
851 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
852 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
853
854 x = build (FILTER_EXPR, integer_type_node);
855 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
856 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
857
858 x = build_resx (get_eh_region_number (tf->region));
859 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
860 }
861
862 /* Wrap the block with protect_cleanup_actions as the action. */
863 if (protect_cleanup_actions)
864 {
865 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
866 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
867 EH_FILTER_MUST_NOT_THROW (x) = 1;
868 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
869 lower_eh_filter (outer_state, &finally);
870 }
871 else
872 lower_eh_constructs_1 (outer_state, &finally);
873
874 /* Hook this up to the end of the existing try block. If we
875 previously fell through the end, we'll have to branch around.
876 This means adding a new goto, and adding it to the queue. */
877
878 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
879
880 if (tf->may_fallthru)
881 {
882 x = lower_try_finally_fallthru_label (tf);
883 x = build1 (GOTO_EXPR, void_type_node, x);
884 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
885
886 if (this_state)
887 maybe_record_in_goto_queue (this_state, x);
888
889 tf->may_fallthru = false;
890 }
891
892 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
893 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
894 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
895
896 /* Having now been handled, EH isn't to be considered with
897 the rest of the outgoing edges. */
898 tf->may_throw = false;
899 }
900
901 /* A subroutine of lower_try_finally. We have determined that there is
902 no fallthru edge out of the finally block. This means that there is
903 no outgoing edge corresponding to any incoming edge. Restructure the
904 try_finally node for this special case. */
905
906 static void
907 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
908 {
909 tree x, finally, lab, return_val;
910 struct goto_queue_node *q, *qe;
911
912 if (tf->may_throw)
913 lab = tf->eh_label;
914 else
915 lab = create_artificial_label ();
916
917 finally = TREE_OPERAND (*tf->top_p, 1);
918 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
919
920 x = build1 (LABEL_EXPR, void_type_node, lab);
921 append_to_statement_list (x, tf->top_p);
922
923 return_val = NULL;
924 q = tf->goto_queue;
925 qe = q + tf->goto_queue_active;
926 for (; q < qe; ++q)
927 if (q->index < 0)
928 do_return_redirection (q, lab, NULL, &return_val);
929 else
930 do_goto_redirection (q, lab, NULL);
931
932 replace_goto_queue (tf);
933
934 lower_eh_constructs_1 (state, &finally);
935 append_to_statement_list (finally, tf->top_p);
936 }
937
938 /* A subroutine of lower_try_finally. We have determined that there is
939 exactly one destination of the finally block. Restructure the
940 try_finally node for this special case. */
941
942 static void
943 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
944 {
945 struct goto_queue_node *q, *qe;
946 tree x, finally, finally_label;
947
948 finally = TREE_OPERAND (*tf->top_p, 1);
949 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
950
951 lower_eh_constructs_1 (state, &finally);
952
953 if (tf->may_throw)
954 {
955 /* Only reachable via the exception edge. Add the given label to
956 the head of the FINALLY block. Append a RESX at the end. */
957
958 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
959 append_to_statement_list (x, tf->top_p);
960
961 append_to_statement_list (finally, tf->top_p);
962
963 x = build_resx (get_eh_region_number (tf->region));
964
965 append_to_statement_list (x, tf->top_p);
966
967 return;
968 }
969
970 if (tf->may_fallthru)
971 {
972 /* Only reachable via the fallthru edge. Do nothing but let
973 the two blocks run together; we'll fall out the bottom. */
974 append_to_statement_list (finally, tf->top_p);
975 return;
976 }
977
978 finally_label = create_artificial_label ();
979 x = build1 (LABEL_EXPR, void_type_node, finally_label);
980 append_to_statement_list (x, tf->top_p);
981
982 append_to_statement_list (finally, tf->top_p);
983
984 q = tf->goto_queue;
985 qe = q + tf->goto_queue_active;
986
987 if (tf->may_return)
988 {
989 /* Reachable by return expressions only. Redirect them. */
990 tree return_val = NULL;
991 for (; q < qe; ++q)
992 do_return_redirection (q, finally_label, NULL, &return_val);
993 replace_goto_queue (tf);
994 }
995 else
996 {
997 /* Reachable by goto expressions only. Redirect them. */
998 for (; q < qe; ++q)
999 do_goto_redirection (q, finally_label, NULL);
1000 replace_goto_queue (tf);
1001
1002 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1003 {
1004 /* Reachable by goto to fallthru label only. Redirect it
1005 to the new label (already created, sadly), and do not
1006 emit the final branch out, or the fallthru label. */
1007 tf->fallthru_label = NULL;
1008 return;
1009 }
1010 }
1011
1012 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1013 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1014 }
1015
1016 /* A subroutine of lower_try_finally. There are multiple edges incoming
1017 and outgoing from the finally block. Implement this by duplicating the
1018 finally block for every destination. */
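/* Roughly, for a try block with a fallthru exit and one goto exit the
   result has this shape (sketch; labels are illustrative):

     <try body>
     <copy of FINALLY>  goto <fallthru label>;
   <label for the goto destination>:
     <copy of FINALLY>  goto <original goto destination>;

   plus, when the block may throw, an EH-labelled copy ending in a RESX. */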
1019
1020 static void
1021 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1022 {
1023 tree finally, new_stmt;
1024 tree x;
1025
1026 finally = TREE_OPERAND (*tf->top_p, 1);
1027 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1028
1029 new_stmt = NULL_TREE;
1030
1031 if (tf->may_fallthru)
1032 {
1033 x = lower_try_finally_dup_block (finally, state);
1034 lower_eh_constructs_1 (state, &x);
1035 append_to_statement_list (x, &new_stmt);
1036
1037 x = lower_try_finally_fallthru_label (tf);
1038 x = build1 (GOTO_EXPR, void_type_node, x);
1039 append_to_statement_list (x, &new_stmt);
1040 }
1041
1042 if (tf->may_throw)
1043 {
1044 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1045 append_to_statement_list (x, &new_stmt);
1046
1047 x = lower_try_finally_dup_block (finally, state);
1048 lower_eh_constructs_1 (state, &x);
1049 append_to_statement_list (x, &new_stmt);
1050
1051 x = build_resx (get_eh_region_number (tf->region));
1052 append_to_statement_list (x, &new_stmt);
1053 }
1054
1055 if (tf->goto_queue)
1056 {
1057 struct goto_queue_node *q, *qe;
1058 tree return_val = NULL;
1059 int return_index, index;
1060 struct
1061 {
1062 struct goto_queue_node *q;
1063 tree label;
1064 } *labels;
1065
1066 return_index = VEC_length (tree, tf->dest_array);
1067 labels = xcalloc (sizeof (*labels), return_index + 1);
1068
1069 q = tf->goto_queue;
1070 qe = q + tf->goto_queue_active;
1071 for (; q < qe; q++)
1072 {
1073 index = q->index < 0 ? return_index : q->index;
1074
1075 if (!labels[index].q)
1076 labels[index].q = q;
1077 }
1078
1079 for (index = 0; index < return_index + 1; index++)
1080 {
1081 tree lab;
1082
1083 q = labels[index].q;
1084 if (! q)
1085 continue;
1086
1087 lab = labels[index].label = create_artificial_label ();
1088
1089 if (index == return_index)
1090 do_return_redirection (q, lab, NULL, &return_val);
1091 else
1092 do_goto_redirection (q, lab, NULL);
1093
1094 x = build1 (LABEL_EXPR, void_type_node, lab);
1095 append_to_statement_list (x, &new_stmt);
1096
1097 x = lower_try_finally_dup_block (finally, state);
1098 lower_eh_constructs_1 (state, &x);
1099 append_to_statement_list (x, &new_stmt);
1100
1101 append_to_statement_list (q->cont_stmt, &new_stmt);
1102 maybe_record_in_goto_queue (state, q->cont_stmt);
1103 }
1104
1105 for (q = tf->goto_queue; q < qe; q++)
1106 {
1107 tree lab;
1108
1109 index = q->index < 0 ? return_index : q->index;
1110
1111 if (labels[index].q == q)
1112 continue;
1113
1114 lab = labels[index].label;
1115
1116 if (index == return_index)
1117 do_return_redirection (q, lab, NULL, &return_val);
1118 else
1119 do_goto_redirection (q, lab, NULL);
1120 }
1121
1122 replace_goto_queue (tf);
1123 free (labels);
1124 }
1125
1126 /* Link the new stmts only after running replace_goto_queue, so that
1127 we do not process the same goto stmts twice. */
1128 append_to_statement_list (new_stmt, tf->top_p);
1129 }
1130
1131 /* A subroutine of lower_try_finally. There are multiple edges incoming
1132 and outgoing from the finally block. Implement this by instrumenting
1133 each incoming edge and creating a switch statement at the end of the
1134 finally block that branches to the appropriate destination. */
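/* Roughly, the generated form is (sketch; names are illustrative):

     <try body>  finally_tmp = <index for this exit>;  goto finally_label;
     ...
   finally_label:
     <FINALLY>
     switch (finally_tmp)
       {
       case <fallthru index>:  goto <fallthru label>;
       case <eh index>:        <RESX>;
       default:                goto <recorded goto/return destination>;
       }  */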
1135
1136 static void
1137 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1138 {
1139 struct goto_queue_node *q, *qe;
1140 tree return_val = NULL;
1141 tree finally, finally_tmp, finally_label;
1142 int return_index, eh_index, fallthru_index;
1143 int nlabels, ndests, j, last_case_index;
1144 tree case_label_vec, switch_stmt, last_case, switch_body;
1145 tree x;
1146
1147 /* Mash the TRY block to the head of the chain. */
1148 finally = TREE_OPERAND (*tf->top_p, 1);
1149 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1150
1151 /* Lower the finally block itself. */
1152 lower_eh_constructs_1 (state, &finally);
1153
1154 /* Prepare for switch statement generation. */
1155 nlabels = VEC_length (tree, tf->dest_array);
1156 return_index = nlabels;
1157 eh_index = return_index + tf->may_return;
1158 fallthru_index = eh_index + tf->may_throw;
1159 ndests = fallthru_index + tf->may_fallthru;
1160
1161 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1162 finally_label = create_artificial_label ();
1163
1164 case_label_vec = make_tree_vec (ndests);
1165 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1166 NULL_TREE, case_label_vec);
1167 switch_body = NULL;
1168 last_case = NULL;
1169 last_case_index = 0;
1170
1171 /* Begin inserting code for getting to the finally block. Things
1172 are done in this order to correspond to the sequence in which the code
1173 is laid out. */
1174
1175 if (tf->may_fallthru)
1176 {
1177 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1178 build_int_cst (NULL_TREE, fallthru_index));
1179 append_to_statement_list (x, tf->top_p);
1180
1181 if (tf->may_throw)
1182 {
1183 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1184 append_to_statement_list (x, tf->top_p);
1185 }
1186
1187
1188 last_case = build (CASE_LABEL_EXPR, void_type_node,
1189 build_int_cst (NULL_TREE, fallthru_index), NULL,
1190 create_artificial_label ());
1191 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1192 last_case_index++;
1193
1194 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1195 append_to_statement_list (x, &switch_body);
1196
1197 x = lower_try_finally_fallthru_label (tf);
1198 x = build1 (GOTO_EXPR, void_type_node, x);
1199 append_to_statement_list (x, &switch_body);
1200 }
1201
1202 if (tf->may_throw)
1203 {
1204 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1205 append_to_statement_list (x, tf->top_p);
1206
1207 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1208 build_int_cst (NULL_TREE, eh_index));
1209 append_to_statement_list (x, tf->top_p);
1210
1211 last_case = build (CASE_LABEL_EXPR, void_type_node,
1212 build_int_cst (NULL_TREE, eh_index), NULL,
1213 create_artificial_label ());
1214 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1215 last_case_index++;
1216
1217 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1218 append_to_statement_list (x, &switch_body);
1219 x = build_resx (get_eh_region_number (tf->region));
1220 append_to_statement_list (x, &switch_body);
1221 }
1222
1223 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1224 append_to_statement_list (x, tf->top_p);
1225
1226 append_to_statement_list (finally, tf->top_p);
1227
1228 /* Redirect each incoming goto edge. */
1229 q = tf->goto_queue;
1230 qe = q + tf->goto_queue_active;
1231 j = last_case_index + tf->may_return;
1232 for (; q < qe; ++q)
1233 {
1234 tree mod;
1235 int switch_id, case_index;
1236
1237 if (q->index < 0)
1238 {
1239 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1240 build_int_cst (NULL_TREE, return_index));
1241 do_return_redirection (q, finally_label, mod, &return_val);
1242 switch_id = return_index;
1243 }
1244 else
1245 {
1246 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1247 build_int_cst (NULL_TREE, q->index));
1248 do_goto_redirection (q, finally_label, mod);
1249 switch_id = q->index;
1250 }
1251
1252 case_index = j + q->index;
1253 if (!TREE_VEC_ELT (case_label_vec, case_index))
1254 TREE_VEC_ELT (case_label_vec, case_index)
1255 = build (CASE_LABEL_EXPR, void_type_node,
1256 build_int_cst (NULL_TREE, switch_id), NULL,
1257 /* We store the cont_stmt in the
1258 CASE_LABEL, so that we can recover it
1259 in the loop below. We don't create
1260 the new label while walking the
1261 goto_queue because pointers don't
1262 offer a stable order. */
1263 q->cont_stmt);
1264 }
1265 for (j = last_case_index; j < last_case_index + nlabels; j++)
1266 {
1267 tree label;
1268 tree cont_stmt;
1269
1270 last_case = TREE_VEC_ELT (case_label_vec, j);
1271
1272 gcc_assert (last_case);
1273
1274 cont_stmt = CASE_LABEL (last_case);
1275
1276 label = create_artificial_label ();
1277 CASE_LABEL (last_case) = label;
1278
1279 x = build (LABEL_EXPR, void_type_node, label);
1280 append_to_statement_list (x, &switch_body);
1281 append_to_statement_list (cont_stmt, &switch_body);
1282 maybe_record_in_goto_queue (state, cont_stmt);
1283 }
1284 replace_goto_queue (tf);
1285
1286 /* Make sure that the last case is the default label, as one is required.
1287 Then sort the labels, which is also required in GIMPLE. */
1288 CASE_LOW (last_case) = NULL;
1289 sort_case_labels (case_label_vec);
1290
1291 /* Link switch_stmt only after running replace_goto_queue, so that we
1292 do not process the same goto stmts twice. */
1293 append_to_statement_list (switch_stmt, tf->top_p);
1294 append_to_statement_list (switch_body, tf->top_p);
1295 }
1296
1297 /* Decide whether or not we are going to duplicate the finally block.
1298 There are several considerations.
1299
1300 First, if this is Java, then the finally block contains code
1301 written by the user. It has line numbers associated with it,
1302 so duplicating the block means it's difficult to set a breakpoint.
1303 Since controlling code generation via -g is verboten, we simply
1304 never duplicate code without optimization.
1305
1306 Second, we'd like to prevent egregious code growth. One way to
1307 do this is to estimate the size of the finally block, multiply
1308 that by the number of copies we'd need to make, and compare against
1309 the estimate of the size of the switch machinery we'd have to add. */
1310
1311 static bool
1312 decide_copy_try_finally (int ndests, tree finally)
1313 {
1314 int f_estimate, sw_estimate;
1315
1316 if (!optimize)
1317 return false;
1318
1319 /* Finally estimate N times, plus N gotos. */
1320 f_estimate = estimate_num_insns (finally);
1321 f_estimate = (f_estimate + 1) * ndests;
1322
1323 /* Switch statement (cost 10), N variable assignments, N gotos. */
1324 sw_estimate = 10 + 2 * ndests;
1325
1326 /* Optimize for size clearly wants our best guess. */
1327 if (optimize_size)
1328 return f_estimate < sw_estimate;
1329
1330 /* ??? These numbers are completely made up so far. */
1331 if (optimize > 1)
1332 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1333 else
1334 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1335 }
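/* A worked example of the heuristic above (numbers are illustrative):
   a finally block estimated at 12 insns with 3 destinations gives
   f_estimate = (12 + 1) * 3 = 39 and sw_estimate = 10 + 2 * 3 = 16,
   so with -Os we emit the switch (39 >= 16), while at -O2 we still
   copy the block, since 39 < 100. */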
1336
1337 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1338 to a sequence of labels and blocks, plus the exception region trees
1339 that record all the magic. This is complicated by the need to
1340 arrange for the FINALLY block to be executed on all exits. */
1341
1342 static void
1343 lower_try_finally (struct leh_state *state, tree *tp)
1344 {
1345 struct leh_tf_state this_tf;
1346 struct leh_state this_state;
1347 int ndests;
1348
1349 /* Process the try block. */
1350
1351 memset (&this_tf, 0, sizeof (this_tf));
1352 this_tf.try_finally_expr = *tp;
1353 this_tf.top_p = tp;
1354 this_tf.outer = state;
1355 if (using_eh_for_cleanups_p)
1356 this_tf.region
1357 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1358 else
1359 this_tf.region = NULL;
1360
1361 this_state.cur_region = this_tf.region;
1362 this_state.prev_try = state->prev_try;
1363 this_state.tf = &this_tf;
1364
1365 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1366
1367 /* Determine if the try block is escaped through the bottom. */
1368 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1369
1370 /* Determine if any exceptions are possible within the try block. */
1371 if (using_eh_for_cleanups_p)
1372 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1373 if (this_tf.may_throw)
1374 {
1375 this_tf.eh_label = create_artificial_label ();
1376 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1377 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1378 }
1379
1380 /* Sort the goto queue for efficient searching later. */
1381 if (this_tf.goto_queue_active > 1)
1382 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1383 sizeof (struct goto_queue_node), goto_queue_cmp);
1384
1385 /* Determine how many edges (still) reach the finally block. Or rather,
1386 how many destinations are reached by the finally block. Use this to
1387 determine how we process the finally block itself. */
1388
1389 ndests = VEC_length (tree, this_tf.dest_array);
1390 ndests += this_tf.may_fallthru;
1391 ndests += this_tf.may_return;
1392 ndests += this_tf.may_throw;
1393
1394 /* If the FINALLY block is not reachable, dike it out. */
1395 if (ndests == 0)
1396 *tp = TREE_OPERAND (*tp, 0);
1397
1398 /* If the finally block doesn't fall through, then any destination
1399 we might try to impose there isn't reached either. There may be
1400 some minor amount of cleanup and redirection still needed. */
1401 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1402 lower_try_finally_nofallthru (state, &this_tf);
1403
1404 /* We can easily special-case redirection to a single destination. */
1405 else if (ndests == 1)
1406 lower_try_finally_onedest (state, &this_tf);
1407
1408 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1409 lower_try_finally_copy (state, &this_tf);
1410 else
1411 lower_try_finally_switch (state, &this_tf);
1412
1413 /* If someone requested we add a label at the end of the transformed
1414 block, do so. */
1415 if (this_tf.fallthru_label)
1416 {
1417 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1418 append_to_statement_list (x, tp);
1419 }
1420
1421 VEC_free (tree, heap, this_tf.dest_array);
1422 if (this_tf.goto_queue)
1423 free (this_tf.goto_queue);
1424 }
1425
1426 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1427 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1428 exception region trees that record all the magic. */
1429
1430 static void
1431 lower_catch (struct leh_state *state, tree *tp)
1432 {
1433 struct eh_region *try_region;
1434 struct leh_state this_state;
1435 tree_stmt_iterator i;
1436 tree out_label;
1437
1438 try_region = gen_eh_region_try (state->cur_region);
1439 this_state.cur_region = try_region;
1440 this_state.prev_try = try_region;
1441 this_state.tf = state->tf;
1442
1443 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1444
1445 if (!get_eh_region_may_contain_throw (try_region))
1446 {
1447 *tp = TREE_OPERAND (*tp, 0);
1448 return;
1449 }
1450
1451 out_label = NULL;
1452 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1453 {
1454 struct eh_region *catch_region;
1455 tree catch, x, eh_label;
1456
1457 catch = tsi_stmt (i);
1458 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1459
1460 this_state.cur_region = catch_region;
1461 this_state.prev_try = state->prev_try;
1462 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1463
1464 eh_label = create_artificial_label ();
1465 set_eh_region_tree_label (catch_region, eh_label);
1466
1467 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1468 tsi_link_before (&i, x, TSI_SAME_STMT);
1469
1470 if (block_may_fallthru (CATCH_BODY (catch)))
1471 {
1472 if (!out_label)
1473 out_label = create_artificial_label ();
1474
1475 x = build1 (GOTO_EXPR, void_type_node, out_label);
1476 append_to_statement_list (x, &CATCH_BODY (catch));
1477 }
1478
1479 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1480 tsi_delink (&i);
1481 }
1482
1483 frob_into_branch_around (tp, NULL, out_label);
1484 }
1485
1486 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1487 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1488 region trees that record all the magic. */
1489
1490 static void
1491 lower_eh_filter (struct leh_state *state, tree *tp)
1492 {
1493 struct leh_state this_state;
1494 struct eh_region *this_region;
1495 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1496 tree eh_label;
1497
1498 if (EH_FILTER_MUST_NOT_THROW (inner))
1499 this_region = gen_eh_region_must_not_throw (state->cur_region);
1500 else
1501 this_region = gen_eh_region_allowed (state->cur_region,
1502 EH_FILTER_TYPES (inner));
1503 this_state = *state;
1504 this_state.cur_region = this_region;
1505
1506 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1507
1508 if (!get_eh_region_may_contain_throw (this_region))
1509 {
1510 *tp = TREE_OPERAND (*tp, 0);
1511 return;
1512 }
1513
1514 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1515 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1516
1517 eh_label = create_artificial_label ();
1518 set_eh_region_tree_label (this_region, eh_label);
1519
1520 frob_into_branch_around (tp, eh_label, NULL);
1521 }
1522
1523 /* Implement a cleanup expression. This is similar to try-finally,
1524 except that we only execute the cleanup block for exception edges. */
1525
1526 static void
1527 lower_cleanup (struct leh_state *state, tree *tp)
1528 {
1529 struct leh_state this_state;
1530 struct eh_region *this_region;
1531 struct leh_tf_state fake_tf;
1532
1533 /* If not using eh, then exception-only cleanups are no-ops. */
1534 if (!flag_exceptions)
1535 {
1536 *tp = TREE_OPERAND (*tp, 0);
1537 lower_eh_constructs_1 (state, tp);
1538 return;
1539 }
1540
1541 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1542 this_state = *state;
1543 this_state.cur_region = this_region;
1544
1545 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1546
1547 if (!get_eh_region_may_contain_throw (this_region))
1548 {
1549 *tp = TREE_OPERAND (*tp, 0);
1550 return;
1551 }
1552
1553 /* Build enough of a try-finally state so that we can reuse
1554 honor_protect_cleanup_actions. */
1555 memset (&fake_tf, 0, sizeof (fake_tf));
1556 fake_tf.top_p = tp;
1557 fake_tf.outer = state;
1558 fake_tf.region = this_region;
1559 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1560 fake_tf.may_throw = true;
1561
1562 fake_tf.eh_label = create_artificial_label ();
1563 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1564
1565 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1566
1567 if (fake_tf.may_throw)
1568 {
1569 /* In this case honor_protect_cleanup_actions had nothing to do,
1570 and we should process this normally. */
1571 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1572 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1573 }
1574 else
1575 {
1576 /* In this case honor_protect_cleanup_actions did nearly all of
1577 the work. All we have left is to append the fallthru_label. */
1578
1579 *tp = TREE_OPERAND (*tp, 0);
1580 if (fake_tf.fallthru_label)
1581 {
1582 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1583 append_to_statement_list (x, tp);
1584 }
1585 }
1586 }
1587
1588 /* Main loop for lowering eh constructs. */
1589
1590 static void
1591 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1592 {
1593 tree_stmt_iterator i;
1594 tree t = *tp;
1595
1596 switch (TREE_CODE (t))
1597 {
1598 case COND_EXPR:
1599 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1600 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1601 break;
1602
1603 case CALL_EXPR:
1604 /* Look for things that can throw exceptions, and record them. */
1605 if (state->cur_region && tree_could_throw_p (t))
1606 {
1607 record_stmt_eh_region (state->cur_region, t);
1608 note_eh_region_may_contain_throw (state->cur_region);
1609 }
1610 break;
1611
1612 case MODIFY_EXPR:
1613 /* Look for things that can throw exceptions, and record them. */
1614 if (state->cur_region && tree_could_throw_p (t))
1615 {
1616 record_stmt_eh_region (state->cur_region, t);
1617 note_eh_region_may_contain_throw (state->cur_region);
1618 }
1619 break;
1620
1621 case GOTO_EXPR:
1622 case RETURN_EXPR:
1623 maybe_record_in_goto_queue (state, t);
1624 break;
1625 case SWITCH_EXPR:
1626 verify_norecord_switch_expr (state, t);
1627 break;
1628
1629 case TRY_FINALLY_EXPR:
1630 lower_try_finally (state, tp);
1631 break;
1632
1633 case TRY_CATCH_EXPR:
1634 i = tsi_start (TREE_OPERAND (t, 1));
1635 switch (TREE_CODE (tsi_stmt (i)))
1636 {
1637 case CATCH_EXPR:
1638 lower_catch (state, tp);
1639 break;
1640 case EH_FILTER_EXPR:
1641 lower_eh_filter (state, tp);
1642 break;
1643 default:
1644 lower_cleanup (state, tp);
1645 break;
1646 }
1647 break;
1648
1649 case STATEMENT_LIST:
1650 for (i = tsi_start (t); !tsi_end_p (i); )
1651 {
1652 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1653 t = tsi_stmt (i);
1654 if (TREE_CODE (t) == STATEMENT_LIST)
1655 {
1656 tsi_link_before (&i, t, TSI_SAME_STMT);
1657 tsi_delink (&i);
1658 }
1659 else
1660 tsi_next (&i);
1661 }
1662 break;
1663
1664 default:
1665 /* A type, a decl, or some kind of statement that we're not
1666 interested in. Don't walk them. */
1667 break;
1668 }
1669 }
1670
1671 static void
1672 lower_eh_constructs (void)
1673 {
1674 struct leh_state null_state;
1675 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1676
1677 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1678
1679 collect_finally_tree (*tp, NULL);
1680
1681 memset (&null_state, 0, sizeof (null_state));
1682 lower_eh_constructs_1 (&null_state, tp);
1683
1684 htab_delete (finally_tree);
1685
1686 collect_eh_region_array ();
1687 }
1688
1689 struct tree_opt_pass pass_lower_eh =
1690 {
1691 "eh", /* name */
1692 NULL, /* gate */
1693 lower_eh_constructs, /* execute */
1694 NULL, /* sub */
1695 NULL, /* next */
1696 0, /* static_pass_number */
1697 TV_TREE_EH, /* tv_id */
1698 PROP_gimple_lcf, /* properties_required */
1699 PROP_gimple_leh, /* properties_provided */
1700 PROP_gimple_lcf, /* properties_destroyed */
1701 0, /* todo_flags_start */
1702 TODO_dump_func, /* todo_flags_finish */
1703 0 /* letter */
1704 };
1705
1706 \f
1707 /* Construct EH edges for STMT. */
1708
1709 static void
1710 make_eh_edge (struct eh_region *region, void *data)
1711 {
1712 tree stmt, lab;
1713 basic_block src, dst;
1714
1715 stmt = data;
1716 lab = get_eh_region_tree_label (region);
1717
1718 src = bb_for_stmt (stmt);
1719 dst = label_to_block (lab);
1720
1721 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1722 }
1723
1724 void
1725 make_eh_edges (tree stmt)
1726 {
1727 int region_nr;
1728 bool is_resx;
1729
1730 if (TREE_CODE (stmt) == RESX_EXPR)
1731 {
1732 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1733 is_resx = true;
1734 }
1735 else
1736 {
1737 region_nr = lookup_stmt_eh_region (stmt);
1738 if (region_nr < 0)
1739 return;
1740 is_resx = false;
1741 }
1742
1743 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1744 }
1745
1746 static bool mark_eh_edge_found_error;
1747
1748 /* Mark the edge that make_eh_edge would create for the given region by
1749 setting its aux field; output an error if something goes wrong. */
1750 static void
1751 mark_eh_edge (struct eh_region *region, void *data)
1752 {
1753 tree stmt, lab;
1754 basic_block src, dst;
1755 edge e;
1756
1757 stmt = data;
1758 lab = get_eh_region_tree_label (region);
1759
1760 src = bb_for_stmt (stmt);
1761 dst = label_to_block (lab);
1762
1763 e = find_edge (src, dst);
1764 if (!e)
1765 {
1766 error ("EH edge %i->%i is missing", src->index, dst->index);
1767 mark_eh_edge_found_error = true;
1768 }
1769 else if (!(e->flags & EDGE_EH))
1770 {
1771 error ("EH edge %i->%i misses EH flag", src->index, dst->index);
1772 mark_eh_edge_found_error = true;
1773 }
1774 else if (e->aux)
1775 {
1776 /* ??? This might not be a mistake. */
1777 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1778 mark_eh_edge_found_error = true;
1779 }
1780 else
1781 e->aux = (void *)1;
1782 }
1783
1784 /* Verify that the basic block containing STMT as its last statement has
1785 precisely the edges that make_eh_edges would create. */
1786 bool
1787 verify_eh_edges (tree stmt)
1788 {
1789 int region_nr;
1790 bool is_resx;
1791 basic_block bb = bb_for_stmt (stmt);
1792 edge_iterator ei;
1793 edge e;
1794
1795 FOR_EACH_EDGE (e, ei, bb->succs)
1796 gcc_assert (!e->aux);
1797 mark_eh_edge_found_error = false;
1798 if (TREE_CODE (stmt) == RESX_EXPR)
1799 {
1800 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1801 is_resx = true;
1802 }
1803 else
1804 {
1805 region_nr = lookup_stmt_eh_region (stmt);
1806 if (region_nr < 0)
1807 {
1808 FOR_EACH_EDGE (e, ei, bb->succs)
1809 if (e->flags & EDGE_EH)
1810 {
1811 error ("BB %i cannot throw but has EH edges", bb->index);
1812 return true;
1813 }
1814 return false;
1815 }
1816 if (!tree_could_throw_p (stmt))
1817 {
1818 error ("BB %i last statement has incorrectly set region", bb->index);
1819 return true;
1820 }
1821 is_resx = false;
1822 }
1823
1824 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1825 FOR_EACH_EDGE (e, ei, bb->succs)
1826 {
1827 if ((e->flags & EDGE_EH) && !e->aux)
1828 {
1829 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1830 mark_eh_edge_found_error = true;
1831 return true;
1832 }
1833 e->aux = NULL;
1834 }
1835 return mark_eh_edge_found_error;
1836 }
1837
1838 \f
1839 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1840 location or floating point arithmetic. Cf. the RTL version, may_trap_p.
1841 This routine expects only GIMPLE lhs or rhs input. */
1842
1843 bool
1844 tree_could_trap_p (tree expr)
1845 {
1846 enum tree_code code = TREE_CODE (expr);
1847 bool honor_nans = false;
1848 bool honor_snans = false;
1849 bool fp_operation = false;
1850 bool honor_trapv = false;
1851 tree t, base;
1852
1853 if (TREE_CODE_CLASS (code) == tcc_comparison
1854 || TREE_CODE_CLASS (code) == tcc_unary
1855 || TREE_CODE_CLASS (code) == tcc_binary)
1856 {
1857 t = TREE_TYPE (expr);
1858 fp_operation = FLOAT_TYPE_P (t);
1859 if (fp_operation)
1860 {
1861 honor_nans = flag_trapping_math && !flag_finite_math_only;
1862 honor_snans = flag_signaling_nans != 0;
1863 }
1864 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
1865 honor_trapv = true;
1866 }
1867
1868 restart:
1869 switch (code)
1870 {
1871 case COMPONENT_REF:
1872 case REALPART_EXPR:
1873 case IMAGPART_EXPR:
1874 case BIT_FIELD_REF:
1875 case WITH_SIZE_EXPR:
1876 expr = TREE_OPERAND (expr, 0);
1877 code = TREE_CODE (expr);
1878 goto restart;
1879
1880 case ARRAY_RANGE_REF:
1881 /* Let us be conservative here for now. We might be checking bounds of
1882 the access similarly to the case below. */
1883 if (!TREE_THIS_NOTRAP (expr))
1884 return true;
1885
1886 base = TREE_OPERAND (expr, 0);
1887 return tree_could_trap_p (base);
1888
1889 case ARRAY_REF:
1890 base = TREE_OPERAND (expr, 0);
1891 if (tree_could_trap_p (base))
1892 return true;
1893
1894 if (TREE_THIS_NOTRAP (expr))
1895 return false;
1896
1897 return !in_array_bounds_p (expr);
1898
1899 case INDIRECT_REF:
1900 case ALIGN_INDIRECT_REF:
1901 case MISALIGNED_INDIRECT_REF:
1902 return !TREE_THIS_NOTRAP (expr);
1903
1904 case ASM_EXPR:
1905 return TREE_THIS_VOLATILE (expr);
1906
1907 case TRUNC_DIV_EXPR:
1908 case CEIL_DIV_EXPR:
1909 case FLOOR_DIV_EXPR:
1910 case ROUND_DIV_EXPR:
1911 case EXACT_DIV_EXPR:
1912 case CEIL_MOD_EXPR:
1913 case FLOOR_MOD_EXPR:
1914 case ROUND_MOD_EXPR:
1915 case TRUNC_MOD_EXPR:
1916 case RDIV_EXPR:
1917 if (honor_snans || honor_trapv)
1918 return true;
1919 if (fp_operation)
1920 return flag_trapping_math;
1921 t = TREE_OPERAND (expr, 1);
1922 if (!TREE_CONSTANT (t) || integer_zerop (t))
1923 return true;
1924 return false;
1925
1926 case LT_EXPR:
1927 case LE_EXPR:
1928 case GT_EXPR:
1929 case GE_EXPR:
1930 case LTGT_EXPR:
1931 /* Some floating point comparisons may trap. */
1932 return honor_nans;
1933
1934 case EQ_EXPR:
1935 case NE_EXPR:
1936 case UNORDERED_EXPR:
1937 case ORDERED_EXPR:
1938 case UNLT_EXPR:
1939 case UNLE_EXPR:
1940 case UNGT_EXPR:
1941 case UNGE_EXPR:
1942 case UNEQ_EXPR:
1943 return honor_snans;
1944
1945 case CONVERT_EXPR:
1946 case FIX_TRUNC_EXPR:
1947 case FIX_CEIL_EXPR:
1948 case FIX_FLOOR_EXPR:
1949 case FIX_ROUND_EXPR:
1950 /* Conversion of floating point might trap. */
1951 return honor_nans;
1952
1953 case NEGATE_EXPR:
1954 case ABS_EXPR:
1955 case CONJ_EXPR:
1956 /* These operations don't trap with floating point. */
1957 if (honor_trapv)
1958 return true;
1959 return false;
1960
1961 case PLUS_EXPR:
1962 case MINUS_EXPR:
1963 case MULT_EXPR:
1964 /* Any floating arithmetic may trap. */
1965 if (fp_operation && flag_trapping_math)
1966 return true;
1967 if (honor_trapv)
1968 return true;
1969 return false;
1970
1971 case CALL_EXPR:
1972 t = get_callee_fndecl (expr);
1973 /* Assume that calls to weak functions may trap. */
1974 if (!t || !DECL_P (t) || DECL_WEAK (t))
1975 return true;
1976 return false;
1977
1978 default:
1979 /* Any floating arithmetic may trap. */
1980 if (fp_operation && flag_trapping_math)
1981 return true;
1982 return false;
1983 }
1984 }
1985
1986 bool
1987 tree_could_throw_p (tree t)
1988 {
1989 if (!flag_exceptions)
1990 return false;
1991 if (TREE_CODE (t) == MODIFY_EXPR)
1992 {
1993 if (flag_non_call_exceptions
1994 && tree_could_trap_p (TREE_OPERAND (t, 0)))
1995 return true;
1996 t = TREE_OPERAND (t, 1);
1997 }
1998
1999 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2000 t = TREE_OPERAND (t, 0);
2001 if (TREE_CODE (t) == CALL_EXPR)
2002 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2003 if (flag_non_call_exceptions)
2004 return tree_could_trap_p (t);
2005 return false;
2006 }
2007
2008 bool
2009 tree_can_throw_internal (tree stmt)
2010 {
2011 int region_nr;
2012 bool is_resx = false;
2013
2014 if (TREE_CODE (stmt) == RESX_EXPR)
2015 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2016 else
2017 region_nr = lookup_stmt_eh_region (stmt);
2018 if (region_nr < 0)
2019 return false;
2020 return can_throw_internal_1 (region_nr, is_resx);
2021 }
2022
2023 bool
2024 tree_can_throw_external (tree stmt)
2025 {
2026 int region_nr;
2027 bool is_resx = false;
2028
2029 if (TREE_CODE (stmt) == RESX_EXPR)
2030 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2031 else
2032 region_nr = lookup_stmt_eh_region (stmt);
2033 if (region_nr < 0)
2034 return tree_could_throw_p (stmt);
2035 else
2036 return can_throw_external_1 (region_nr, is_resx);
2037 }
2038
2039 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2040 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2041 in the table if it should be in there. Return TRUE if a replacement was
2042 done that may require an EH edge purge. */
2043
2044 bool
2045 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2046 {
2047 int region_nr = lookup_stmt_eh_region (old_stmt);
2048
2049 if (region_nr >= 0)
2050 {
2051 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2052
2053 if (new_stmt == old_stmt && new_stmt_could_throw)
2054 return false;
2055
2056 remove_stmt_from_eh_region (old_stmt);
2057 if (new_stmt_could_throw)
2058 {
2059 add_stmt_to_eh_region (new_stmt, region_nr);
2060 return false;
2061 }
2062 else
2063 return true;
2064 }
2065
2066 return false;
2067 }
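
/* Typical use after a statement has been folded or replaced (a sketch;
   the caller and its edge-purging step are assumptions, not part of
   this file):

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
       ... the statement can no longer throw: remove the now-dead
           EH edges out of its basic block ...  */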