errors.h (warning, [...]): Mark as cold.
[gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39 #include "toplev.h"
40
41 \f
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Record that cleanup actions (e.g. destructors) are to be routed
   through the exception-handling machinery.  Called by front ends;
   the flag is latched and never reset.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
50 \f
51 /* Misc functions used in this file. */
52
53 /* Compare and hash for any structure which begins with a canonical
54 pointer. Assumes all pointers are interchangeable, which is sort
55 of already assumed by gcc elsewhere IIRC. */
56
/* Equality callback for hash tables keyed on a leading canonical
   pointer: two entries match iff their first pointer members are
   identical.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void *const *pa = (const void *const *) a;
  const void *const *pb = (const void *const *) b;

  return *pa == *pb;
}
64
65 static hashval_t
66 struct_ptr_hash (const void *a)
67 {
68 const void * const * x = (const void * const *) a;
69 return (size_t)*x >> 4;
70 }
71
72 \f
73 /* Remember and lookup EH region data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
76
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
80
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
84
85 static void
86 record_stmt_eh_region (struct eh_region *region, tree t)
87 {
88 if (!region)
89 return;
90
91 add_stmt_to_eh_region (t, get_eh_region_number (region));
92 }
93
/* Record in IFUN's throw-statement table that statement T belongs to
   EH region number NUM.  The table is created lazily (in GC memory,
   via htab_create_ggc) on first use.  Each statement may be entered
   at most once.  */

void
add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  /* RESX_EXPRs carry their region number directly; they must not be
     entered in the table.  */
  gcc_assert (TREE_CODE (t) != RESX_EXPR);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  /* Duplicate entries indicate a bookkeeping bug upstream.  */
  gcc_assert (!*slot);
  *slot = n;
}
116
/* Convenience wrapper: record statement T in region NUM for the
   current function (cfun).  */

void
add_stmt_to_eh_region (tree t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
122
123 bool
124 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
125 {
126 struct throw_stmt_node dummy;
127 void **slot;
128
129 if (!get_eh_throw_stmt_table (ifun))
130 return false;
131
132 dummy.stmt = t;
133 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
134 NO_INSERT);
135 if (slot)
136 {
137 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
138 return true;
139 }
140 else
141 return false;
142 }
143
/* Convenience wrapper: remove statement T from the current function's
   (cfun's) throw-statement table.  Returns true if removed.  */

bool
remove_stmt_from_eh_region (tree t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
149
150 int
151 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
152 {
153 struct throw_stmt_node *p, n;
154
155 if (!get_eh_throw_stmt_table (ifun))
156 return -2;
157
158 n.stmt = t;
159 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
160 &n);
161
162 return (p ? p->region_nr : -1);
163 }
164
165 int
166 lookup_stmt_eh_region (tree t)
167 {
168 /* We can get called from initialized data when -fnon-call-exceptions
169 is on; prevent crash. */
170 if (!cfun)
171 return -1;
172 return lookup_stmt_eh_region_fn (cfun, t);
173 }
174
175 \f
/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

/* One parent link in the finally tree: CHILD is a LABEL_DECL or a
   TRY_FINALLY_EXPR; PARENT is the innermost TRY_FINALLY_EXPR enclosing
   it, or NULL at the outermost level.  */
struct finally_tree_node
{
  tree child, parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
187
188 static void
189 record_in_finally_tree (tree child, tree parent)
190 {
191 struct finally_tree_node *n;
192 void **slot;
193
194 n = XNEW (struct finally_tree_node);
195 n->child = child;
196 n->parent = parent;
197
198 slot = htab_find_slot (finally_tree, n, INSERT);
199 gcc_assert (!*slot);
200 *slot = n;
201 }
202
/* Walk the statement tree T, recording every LABEL_DECL and every
   nested TRY_FINALLY_EXPR in FINALLY_TREE with parent REGION, the
   innermost enclosing TRY_FINALLY_EXPR (NULL at the outermost level).
   Tail positions iterate via goto rather than recurse, to bound stack
   depth on long statement chains.  */

static void
collect_finally_tree (tree t, tree region)
{
 tailrecurse:
  switch (TREE_CODE (t))
    {
    case LABEL_EXPR:
      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
      break;

    case TRY_FINALLY_EXPR:
      record_in_finally_tree (t, region);
      /* The try body is enclosed by this node; the cleanup sequence
	 belongs to the outer region.  */
      collect_finally_tree (TREE_OPERAND (t, 0), t);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case TRY_CATCH_EXPR:
      collect_finally_tree (TREE_OPERAND (t, 0), region);
      t = TREE_OPERAND (t, 1);
      goto tailrecurse;

    case CATCH_EXPR:
      t = CATCH_BODY (t);
      goto tailrecurse;

    case EH_FILTER_EXPR:
      t = EH_FILTER_FAILURE (t);
      goto tailrecurse;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	  collect_finally_tree (tsi_stmt (i), region);
      }
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
246
247 /* Use the finally tree to determine if a jump from START to TARGET
248 would leave the try_finally node that START lives in. */
249
250 static bool
251 outside_finally_tree (tree start, tree target)
252 {
253 struct finally_tree_node n, *p;
254
255 do
256 {
257 n.child = start;
258 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
259 if (!p)
260 return true;
261 start = p->parent;
262 }
263 while (start != target);
264
265 return false;
266 }
267 \f
268 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
269 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
270 The eh region creation is straight-forward, but frobbing all the gotos
271 and such into shape isn't. */
272
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region *cur_region;
  struct eh_region *prev_try;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  Null when we are not
     inside a TRY_FINALLY_EXPR.  */
  struct leh_tf_state *tf;
};
288
/* Per-TRY_FINALLY lowering state.  */

struct leh_tf_state
{
  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
     is the original TRY_FINALLY_EXPR.  We need to retain this so that
     outside_finally_tree can reliably reference the tree used in the
     collect_finally_tree data structures.  */
  tree try_finally_expr;
  tree *top_p;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region *region;

  /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
     that are seen to escape this TRY_FINALLY_EXPR node.  */
  struct goto_queue_node {
    tree stmt;       /* The escaping GOTO_EXPR or RETURN_EXPR.  */
    tree repl_stmt;  /* Statement list that replaces it in place.  */
    tree cont_stmt;  /* Statement continuing to the real destination.  */
    int index;       /* Index into dest_array, or -1 for a return.  */
  } *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a RETURN_EXPR.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
338
339 static void lower_eh_filter (struct leh_state *, tree *);
340 static void lower_eh_constructs_1 (struct leh_state *, tree *);
341
342 /* Comparison function for qsort/bsearch. We're interested in
343 searching goto queue elements for source statements. */
344
345 static int
346 goto_queue_cmp (const void *x, const void *y)
347 {
348 tree a = ((const struct goto_queue_node *)x)->stmt;
349 tree b = ((const struct goto_queue_node *)y)->stmt;
350 return (a == b ? 0 : a < b ? -1 : 1);
351 }
352
353 /* Search for STMT in the goto queue. Return the replacement,
354 or null if the statement isn't in the queue. */
355
356 static tree
357 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
358 {
359 struct goto_queue_node tmp, *ret;
360 tmp.stmt = stmt;
361 ret = (struct goto_queue_node *)
362 bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
363 sizeof (struct goto_queue_node), goto_queue_cmp);
364 return (ret ? ret->repl_stmt : NULL);
365 }
366
367 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
368 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
369 then we can just splat it in, otherwise we add the new stmts immediately
370 after the COND_EXPR and redirect. */
371
372 static void
373 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
374 tree_stmt_iterator *tsi)
375 {
376 tree new, one, label;
377
378 new = find_goto_replacement (tf, *tp);
379 if (!new)
380 return;
381
382 one = expr_only (new);
383 if (one && TREE_CODE (one) == GOTO_EXPR)
384 {
385 *tp = one;
386 return;
387 }
388
389 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
390 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
391
392 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
393 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
394 }
395
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);

/* Process statement T at *TSI: splice in the queued replacement for
   escaping gotos/returns, and recurse into compound statements.  On
   return TSI points at the next statement to process (either past T,
   or past the spliced replacement).  */

static void
replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
{
  switch (TREE_CODE (t))
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      t = find_goto_replacement (tf, t);
      if (t)
	{
	  /* Replace the statement in place; TSI ends up after the
	     newly linked replacement.  */
	  tsi_link_before (tsi, t, TSI_SAME_STMT);
	  tsi_delink (tsi);
	  return;
	}
      break;

    case COND_EXPR:
      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
      break;
    case CATCH_EXPR:
      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
      break;
    case EH_FILTER_EXPR:
      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
      break;

    case STATEMENT_LIST:
      /* Nested statement lists are flattened by the iterators; we
	 should never see one as an individual statement here.  */
      gcc_unreachable ();

    default:
      /* These won't have gotos in them.  */
      break;
    }

  tsi_next (tsi);
}
444
445 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
446
447 static void
448 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
449 {
450 tree_stmt_iterator i = tsi_start (t);
451 while (!tsi_end_p (i))
452 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
453 }
454
455 /* Replace all goto queue members. */
456
457 static void
458 replace_goto_queue (struct leh_tf_state *tf)
459 {
460 if (tf->goto_queue_active == 0)
461 return;
462 replace_goto_queue_stmt_list (*tf->top_p, tf);
463 }
464
/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
{
  struct leh_tf_state *tf = state->tf;
  struct goto_queue_node *q;
  size_t active, size;
  int index;

  /* Nothing to record unless we are inside a try_finally.  */
  if (!tf)
    return;

  switch (TREE_CODE (stmt))
    {
    case GOTO_EXPR:
      {
	tree lab = GOTO_DESTINATION (stmt);

	/* Computed and non-local gotos do not get processed.  Given
	   their nature we can neither tell whether we've escaped the
	   finally block nor redirect them if we knew.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  return;

	/* No need to record gotos that don't leave the try block.  */
	if (! outside_finally_tree (lab, tf->try_finally_expr))
	  return;

	/* INDEX becomes LAB's position in DEST_ARRAY, allocating the
	   vector or appending the label if it's new.  */
	if (! tf->dest_array)
	  {
	    tf->dest_array = VEC_alloc (tree, heap, 10);
	    VEC_quick_push (tree, tf->dest_array, lab);
	    index = 0;
	  }
	else
	  {
	    int n = VEC_length (tree, tf->dest_array);
	    for (index = 0; index < n; ++index)
	      if (VEC_index (tree, tf->dest_array, index) == lab)
		break;
	    if (index == n)
	      VEC_safe_push (tree, heap, tf->dest_array, lab);
	  }
      }
      break;

    case RETURN_EXPR:
      /* All returns share the reserved index -1.  */
      tf->may_return = true;
      index = -1;
      break;

    default:
      gcc_unreachable ();
    }

  /* Append to the goto queue, doubling its capacity when full.  */
  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = stmt;
  q->index = index;
}
540
#ifdef ENABLE_CHECKING
/* We do not process SWITCH_EXPRs for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;
  tree vec;

  /* Outside a try_finally there is nothing to check.  */
  if (!tf)
    return;

  vec = SWITCH_LABELS (switch_expr);
  n = TREE_VEC_LENGTH (vec);

  /* Every case label must stay within the current try_finally.  */
  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
568
569 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
570 whatever is needed to finish the return. If MOD is non-null, insert it
571 before the new branch. RETURN_VALUE_P is a cache containing a temporary
572 variable to be used in manipulating the value returned from the function. */
573
574 static void
575 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
576 tree *return_value_p)
577 {
578 tree ret_expr = TREE_OPERAND (q->stmt, 0);
579 tree x;
580
581 if (ret_expr)
582 {
583 /* The nasty part about redirecting the return value is that the
584 return value itself is to be computed before the FINALLY block
585 is executed. e.g.
586
587 int x;
588 int foo (void)
589 {
590 x = 0;
591 try {
592 return x;
593 } finally {
594 x++;
595 }
596 }
597
598 should return 0, not 1. Arrange for this to happen by copying
599 computed the return value into a local temporary. This also
600 allows us to redirect multiple return statements through the
601 same destination block; whether this is a net win or not really
602 depends, I guess, but it does make generation of the switch in
603 lower_try_finally_switch easier. */
604
605 switch (TREE_CODE (ret_expr))
606 {
607 case RESULT_DECL:
608 if (!*return_value_p)
609 *return_value_p = ret_expr;
610 else
611 gcc_assert (*return_value_p == ret_expr);
612 q->cont_stmt = q->stmt;
613 break;
614
615 case GIMPLE_MODIFY_STMT:
616 {
617 tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
618 tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
619
620 if (!*return_value_p)
621 {
622 if (aggregate_value_p (TREE_TYPE (result),
623 TREE_TYPE (current_function_decl)))
624 /* If this function returns in memory, copy the argument
625 into the return slot now. Otherwise, we might need to
626 worry about magic return semantics, so we need to use a
627 temporary to hold the value until we're actually ready
628 to return. */
629 new = result;
630 else
631 new = create_tmp_var (TREE_TYPE (old), "rettmp");
632 *return_value_p = new;
633 }
634 else
635 new = *return_value_p;
636
637 x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (new), new, old);
638 append_to_statement_list (x, &q->repl_stmt);
639
640 if (new == result)
641 x = result;
642 else
643 x = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (result), result, new);
644 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
645 }
646
647 default:
648 gcc_unreachable ();
649 }
650 }
651 else
652 {
653 /* If we don't return a value, all return statements are the same. */
654 q->cont_stmt = q->stmt;
655 }
656
657 if (mod)
658 append_to_statement_list (mod, &q->repl_stmt);
659
660 x = build1 (GOTO_EXPR, void_type_node, finlab);
661 append_to_statement_list (x, &q->repl_stmt);
662 }
663
664 /* Similar, but easier, for GOTO_EXPR. */
665
666 static void
667 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
668 {
669 tree x;
670
671 q->cont_stmt = q->stmt;
672 if (mod)
673 append_to_statement_list (mod, &q->repl_stmt);
674
675 x = build1 (GOTO_EXPR, void_type_node, finlab);
676 append_to_statement_list (x, &q->repl_stmt);
677 }
678
679 /* We want to transform
680 try { body; } catch { stuff; }
681 to
682 body; goto over; lab: stuff; over:
683
684 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
685 should be placed before the second operand, or NULL. OVER is
686 an existing label that should be put at the exit, or NULL. */
687
688 static void
689 frob_into_branch_around (tree *tp, tree lab, tree over)
690 {
691 tree x, op1;
692
693 op1 = TREE_OPERAND (*tp, 1);
694 *tp = TREE_OPERAND (*tp, 0);
695
696 if (block_may_fallthru (*tp))
697 {
698 if (!over)
699 over = create_artificial_label ();
700 x = build1 (GOTO_EXPR, void_type_node, over);
701 append_to_statement_list (x, tp);
702 }
703
704 if (lab)
705 {
706 x = build1 (LABEL_EXPR, void_type_node, lab);
707 append_to_statement_list (x, tp);
708 }
709
710 append_to_statement_list (op1, tp);
711
712 if (over)
713 {
714 x = build1 (LABEL_EXPR, void_type_node, over);
715 append_to_statement_list (x, tp);
716 }
717 }
718
719 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
720 Make sure to record all new labels found. */
721
722 static tree
723 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
724 {
725 tree region = NULL;
726
727 t = unsave_expr_now (t);
728
729 if (outer_state->tf)
730 region = outer_state->tf->try_finally_expr;
731 collect_finally_tree (t, region);
732
733 return t;
734 }
735
736 /* A subroutine of lower_try_finally. Create a fallthru label for
737 the given try_finally state. The only tricky bit here is that
738 we have to make sure to record the label in our outer context. */
739
740 static tree
741 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
742 {
743 tree label = tf->fallthru_label;
744 if (!label)
745 {
746 label = create_artificial_label ();
747 tf->fallthru_label = label;
748 if (tf->outer->tf)
749 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
750 }
751 return label;
752 }
753
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions, finally, x;
  tree_stmt_iterator i;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = TREE_OPERAND (*tf->top_p, 1);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
	try {
	  try {
	    throw ...;
	  } cleanup {
	    try {
	      throw ...;
	    } catch (...) {
	    }
	  }
	} catch (...) {
	  yyy;
	}
    E.g. with an inline destructor with an embedded try block.  In this
    case we must save the runtime EH data around the nested exception.

    This complication means that any time the previous runtime data might
    be used (via fallthru from the finally) we handle the eh case here,
    whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = block_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      /* On entry, save the active exception pointer and filter value
	 so a nested exception inside the finally can't clobber them.  */
      i = tsi_start (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_eptr, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, save_filt, x);
      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);

      /* At the end, restore them and re-raise via RESX.  */
      i = tsi_last (finally);
      x = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_eptr);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build0 (FILTER_EXPR, integer_type_node);
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, x, save_filt);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      x = build_resx (get_eh_region_number (tf->region));
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
      EH_FILTER_MUST_NOT_THROW (x) = 1;
      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
      lower_eh_filter (outer_state, &finally);
    }
  else
    lower_eh_constructs_1 (outer_state, &finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));

  if (tf->may_fallthru)
    {
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);

      if (this_state)
	maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
891
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
{
  tree x, finally, lab, return_val;
  struct goto_queue_node *q, *qe;

  /* Reuse the EH landing pad label when EH reaches the finally;
     otherwise make a fresh label for the redirected gotos/returns.  */
  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label ();

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  x = build1 (LABEL_EXPR, void_type_node, lab);
  append_to_statement_list (x, tf->top_p);

  /* Send every queued goto/return through the single finally copy.  */
  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, &finally);
  append_to_statement_list (finally, tf->top_p);
}
928
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree x, finally, finally_label;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */

      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      append_to_statement_list (finally, tf->top_p);

      x = build_resx (get_eh_region_number (tf->region));

      append_to_statement_list (x, tf->top_p);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      append_to_statement_list (finally, tf->top_p);
      return;
    }

  finally_label = create_artificial_label ();
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Emit the continuation that reaches the real destination, and
     requeue it in case it escapes an outer try_finally too.  */
  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
1006
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  tree finally, new_stmt;
  tree x;

  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  new_stmt = NULL_TREE;

  if (tf->may_fallthru)
    {
      /* One copy of the finally for the fallthru path, ending in a
	 jump to the fallthru label.  */
      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->may_throw)
    {
      /* One copy for the exception path, headed by the landing-pad
	 label and ending in a re-raise.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, &new_stmt);

      x = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &x);
      append_to_statement_list (x, &new_stmt);

      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &new_stmt);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      /* One label slot per distinct goto destination, plus one shared
	 slot (at RETURN_INDEX) for all returns.  */
      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      /* First pass: remember the first queue entry per destination.  */
      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      /* Emit one finally copy per destination, redirecting that
	 destination's representative queue entry to it.  */
      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label = create_artificial_label ();

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);

	  x = build1 (LABEL_EXPR, void_type_node, lab);
	  append_to_statement_list (x, &new_stmt);

	  x = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &x);
	  append_to_statement_list (x, &new_stmt);

	  append_to_statement_list (q->cont_stmt, &new_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      /* Second pass: redirect the remaining queue entries to the
	 labels created above.  */
      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (new_stmt, tf->top_p);
}
1121
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally, finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree case_label_vec, switch_stmt, last_case, switch_body;
  tree x;

  /* Mash the TRY block to the head of the chain.  */
  finally = TREE_OPERAND (*tf->top_p, 1);
  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  Assign each destination
     a distinct discriminator value: first the explicit goto targets,
     then (when present) the return, throw, and fallthru cases.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  /* FINALLY_TMP holds the discriminator at run time; FINALLY_LABEL
     heads the single shared copy of the finally block.  */
  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label ();

  case_label_vec = make_tree_vec (ndests);
  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
			NULL_TREE, case_label_vec);
  switch_body = NULL;
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      /* Falling into the finally block: set the discriminator...  */
      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, fallthru_index));
      append_to_statement_list (x, tf->top_p);

      /* ... and, if an EH label will be emitted just below, jump over
	 it to the shared finally block.  */
      if (tf->may_throw)
	{
	  x = build1 (GOTO_EXPR, void_type_node, finally_label);
	  append_to_statement_list (x, tf->top_p);
	}


      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, fallthru_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);

      /* The fallthru case branches to the label past the construct.  */
      x = lower_try_finally_fallthru_label (tf);
      x = build1 (GOTO_EXPR, void_type_node, x);
      append_to_statement_list (x, &switch_body);
    }

  if (tf->may_throw)
    {
      /* Exception edges enter here; after the finally block runs, the
	 corresponding switch case re-raises via a RESX.  */
      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
      append_to_statement_list (x, tf->top_p);

      x = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
		  build_int_cst (NULL_TREE, eh_index));
      append_to_statement_list (x, tf->top_p);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL_TREE, eh_index), NULL,
			  create_artificial_label ());
      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
      last_case_index++;

      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
      append_to_statement_list (x, &switch_body);
      x = build_resx (get_eh_region_number (tf->region));
      append_to_statement_list (x, &switch_body);
    }

  /* Emit the shared finally block itself.  */
  x = build1 (LABEL_EXPR, void_type_node, finally_label);
  append_to_statement_list (x, tf->top_p);

  append_to_statement_list (finally, tf->top_p);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  for (; q < qe; ++q)
    {
      tree mod;
      int switch_id, case_index;

      if (q->index < 0)
	{
	  /* A return: all returns share the RETURN_INDEX case.  */
	  mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, return_index));
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  /* A goto: the queue index doubles as the discriminator.  */
	  mod = build2 (GIMPLE_MODIFY_STMT, void_type_node, finally_tmp,
			build_int_cst (NULL_TREE, q->index));
	  do_goto_redirection (q, finally_label, mod);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (!TREE_VEC_ELT (case_label_vec, case_index))
	TREE_VEC_ELT (case_label_vec, case_index)
	  = build3 (CASE_LABEL_EXPR, void_type_node,
		    build_int_cst (NULL_TREE, switch_id), NULL,
		    /* We store the cont_stmt in the
		       CASE_LABEL, so that we can recover it
		       in the loop below.  We don't create
		       the new label while walking the
		       goto_queue because pointers don't
		       offer a stable order.  */
		    q->cont_stmt);
    }
  /* Now that the cases are in a stable order, create the real case
     labels and emit each destination's continuation statement.  */
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      tree cont_stmt;

      last_case = TREE_VEC_ELT (case_label_vec, j);

      gcc_assert (last_case);

      /* Recover the cont_stmt stashed in the CASE_LABEL above.  */
      cont_stmt = CASE_LABEL (last_case);

      label = create_artificial_label ();
      CASE_LABEL (last_case) = label;

      x = build1 (LABEL_EXPR, void_type_node, label);
      append_to_statement_list (x, &switch_body);
      append_to_statement_list (cont_stmt, &switch_body);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Need to link switch_stmt after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  append_to_statement_list (switch_stmt, tf->top_p);
  append_to_statement_list (switch_body, tf->top_p);
}
1287
1288 /* Decide whether or not we are going to duplicate the finally block.
1289 There are several considerations.
1290
1291 First, if this is Java, then the finally block contains code
1292 written by the user. It has line numbers associated with it,
1293 so duplicating the block means it's difficult to set a breakpoint.
1294 Since controlling code generation via -g is verboten, we simply
1295 never duplicate code without optimization.
1296
1297 Second, we'd like to prevent egregious code growth. One way to
1298 do this is to estimate the size of the finally block, multiply
1299 that by the number of copies we'd need to make, and compare against
1300 the estimate of the size of the switch machinery we'd have to add. */
1301
1302 static bool
1303 decide_copy_try_finally (int ndests, tree finally)
1304 {
1305 int f_estimate, sw_estimate;
1306
1307 if (!optimize)
1308 return false;
1309
1310 /* Finally estimate N times, plus N gotos. */
1311 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1312 f_estimate = (f_estimate + 1) * ndests;
1313
1314 /* Switch statement (cost 10), N variable assignments, N gotos. */
1315 sw_estimate = 10 + 2 * ndests;
1316
1317 /* Optimize for size clearly wants our best guess. */
1318 if (optimize_size)
1319 return f_estimate < sw_estimate;
1320
1321 /* ??? These numbers are completely made up so far. */
1322 if (optimize > 1)
1323 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1324 else
1325 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1326 }
1327
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR nodes
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static void
lower_try_finally (struct leh_state *state, tree *tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = *tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  /* A cleanup region is only created when EH is used to run the
     FINALLY block on exception edges.  */
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.prev_try = state->prev_try;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label ();
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Sort the goto queue for efficient searching later.  */
  if (this_tf.goto_queue_active > 1)
    qsort (this_tf.goto_queue, this_tf.goto_queue_active,
	   sizeof (struct goto_queue_node), goto_queue_cmp);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    *tp = TREE_OPERAND (*tp, 0);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);

  /* Otherwise duplicate the finally block per destination, or emit one
     shared copy dispatched through a switch, per the size heuristic.  */
  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
      append_to_statement_list (x, tp);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
}
1416
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static void
lower_catch (struct leh_state *state, tree *tp)
{
  struct eh_region *try_region;
  struct leh_state this_state;
  tree_stmt_iterator i;
  tree out_label;

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.prev_try = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing in the protected body can throw, the whole construct
     reduces to the body itself.  */
  if (!get_eh_region_may_contain_throw (try_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Replace each CATCH_EXPR with an EH label followed by its lowered
     handler body.  OUT_LABEL is created lazily, only when some handler
     can fall off its end.  */
  out_label = NULL;
  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
    {
      struct eh_region *catch_region;
      tree catch, x, eh_label;

      catch = tsi_stmt (i);
      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));

      /* Lower the handler body with the outer prev_try restored.  */
      this_state.cur_region = catch_region;
      this_state.prev_try = state->prev_try;
      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));

      eh_label = create_artificial_label ();
      set_eh_region_tree_label (catch_region, eh_label);

      x = build1 (LABEL_EXPR, void_type_node, eh_label);
      tsi_link_before (&i, x, TSI_SAME_STMT);

      /* If the handler can fall through, branch around the remaining
	 handlers to the shared exit label.  */
      if (block_may_fallthru (CATCH_BODY (catch)))
	{
	  if (!out_label)
	    out_label = create_artificial_label ();

	  x = build1 (GOTO_EXPR, void_type_node, out_label);
	  append_to_statement_list (x, &CATCH_BODY (catch));
	}

      /* Splice the handler body in place of the CATCH_EXPR node.  */
      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
      tsi_delink (&i);
    }

  frob_into_branch_around (tp, NULL, out_label);
}
1476
/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static void
lower_eh_filter (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  tree inner = expr_first (TREE_OPERAND (*tp, 1));
  tree eh_label;

  /* A must-not-throw filter gets its own region kind; otherwise the
     region records the filter's set of allowed exception types.  */
  if (EH_FILTER_MUST_NOT_THROW (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
					 EH_FILTER_TYPES (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing in the protected body can throw, the whole construct
     reduces to the body itself.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Lower the failure action and make it the handler operand.  Note
     it is lowered in the OUTER state, not inside the new region.  */
  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);

  eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, eh_label);

  frob_into_branch_around (tp, eh_label, NULL);
}
1513
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static void
lower_cleanup (struct leh_state *state, tree *tp)
{
  struct leh_state this_state;
  struct eh_region *this_region;
  struct leh_tf_state fake_tf;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      *tp = TREE_OPERAND (*tp, 0);
      lower_eh_constructs_1 (state, tp);
      return;
    }

  this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));

  /* If nothing in the protected body can throw, the cleanup is dead;
     keep just the body.  */
  if (!get_eh_region_may_contain_throw (this_region))
    {
      *tp = TREE_OPERAND (*tp, 0);
      return;
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label ();
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  /* May clear fake_tf.may_throw if it fully handles the cleanup.  */
  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      *tp = TREE_OPERAND (*tp, 0);
      if (fake_tf.fallthru_label)
	{
	  tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
	  append_to_statement_list (x, tp);
	}
    }
}
1578
/* Main loop for lowering eh constructs.  Recursively walks the
   statement tree rooted at *TP under lowering state STATE, replacing
   TRY_FINALLY_EXPR / TRY_CATCH_EXPR constructs and recording
   potentially-throwing statements in the EH region tables.  */

static void
lower_eh_constructs_1 (struct leh_state *state, tree *tp)
{
  tree_stmt_iterator i;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      /* Recurse into both arms.  */
      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
      break;

    case CALL_EXPR:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_MODIFY_STMT:
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && tree_could_throw_p (t))
	{
	  record_stmt_eh_region (state->cur_region, t);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Control leaving a try-finally must be noted so that the
	 finally block can be threaded onto the path.  */
      maybe_record_in_goto_queue (state, t);
      break;
    case SWITCH_EXPR:
      verify_norecord_switch_expr (state, t);
      break;

    case TRY_FINALLY_EXPR:
      lower_try_finally (state, tp);
      break;

    case TRY_CATCH_EXPR:
      /* The code of the first statement of operand 1 distinguishes a
	 real catch list, an EH filter, and a plain cleanup.  */
      i = tsi_start (TREE_OPERAND (t, 1));
      switch (TREE_CODE (tsi_stmt (i)))
	{
	case CATCH_EXPR:
	  lower_catch (state, tp);
	  break;
	case EH_FILTER_EXPR:
	  lower_eh_filter (state, tp);
	  break;
	default:
	  lower_cleanup (state, tp);
	  break;
	}
      break;

    case STATEMENT_LIST:
      for (i = tsi_start (t); !tsi_end_p (i); )
	{
	  lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
	  t = tsi_stmt (i);
	  /* Lowering may replace a statement with a statement list;
	     flatten it back into the enclosing list.  */
	  if (TREE_CODE (t) == STATEMENT_LIST)
	    {
	      tsi_link_before (&i, t, TSI_SAME_STMT);
	      tsi_delink (&i);
	    }
	  else
	    tsi_next (&i);
	}
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
1661
1662 static unsigned int
1663 lower_eh_constructs (void)
1664 {
1665 struct leh_state null_state;
1666 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1667
1668 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1669
1670 collect_finally_tree (*tp, NULL);
1671
1672 memset (&null_state, 0, sizeof (null_state));
1673 lower_eh_constructs_1 (&null_state, tp);
1674
1675 htab_delete (finally_tree);
1676
1677 collect_eh_region_array ();
1678 return 0;
1679 }
1680
/* Descriptor for the EH lowering pass.  Executes lower_eh_constructs;
   requires PROP_gimple_lcf and provides PROP_gimple_leh.  */
struct tree_opt_pass pass_lower_eh =
{
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
1697
1698 \f
1699 /* Construct EH edges for STMT. */
1700
1701 static void
1702 make_eh_edge (struct eh_region *region, void *data)
1703 {
1704 tree stmt, lab;
1705 basic_block src, dst;
1706
1707 stmt = (tree) data;
1708 lab = get_eh_region_tree_label (region);
1709
1710 src = bb_for_stmt (stmt);
1711 dst = label_to_block (lab);
1712
1713 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1714 }
1715
1716 void
1717 make_eh_edges (tree stmt)
1718 {
1719 int region_nr;
1720 bool is_resx;
1721
1722 if (TREE_CODE (stmt) == RESX_EXPR)
1723 {
1724 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1725 is_resx = true;
1726 }
1727 else
1728 {
1729 region_nr = lookup_stmt_eh_region (stmt);
1730 if (region_nr < 0)
1731 return;
1732 is_resx = false;
1733 }
1734
1735 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1736 }
1737
/* Set by mark_eh_edge when it diagnoses a problem; cleared and
   consumed by verify_eh_edges.  */
static bool mark_eh_edge_found_error;
1739
1740 /* Mark edge make_eh_edge would create for given region by setting it aux
1741 field, output error if something goes wrong. */
1742 static void
1743 mark_eh_edge (struct eh_region *region, void *data)
1744 {
1745 tree stmt, lab;
1746 basic_block src, dst;
1747 edge e;
1748
1749 stmt = (tree) data;
1750 lab = get_eh_region_tree_label (region);
1751
1752 src = bb_for_stmt (stmt);
1753 dst = label_to_block (lab);
1754
1755 e = find_edge (src, dst);
1756 if (!e)
1757 {
1758 error ("EH edge %i->%i is missing", src->index, dst->index);
1759 mark_eh_edge_found_error = true;
1760 }
1761 else if (!(e->flags & EDGE_EH))
1762 {
1763 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1764 mark_eh_edge_found_error = true;
1765 }
1766 else if (e->aux)
1767 {
1768 /* ??? might not be mistake. */
1769 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1770 mark_eh_edge_found_error = true;
1771 }
1772 else
1773 e->aux = (void *)1;
1774 }
1775
/* Verify that BB containing stmt as last stmt has precisely the edges
   make_eh_edges would create.  Returns true if an error was found.  */
bool
verify_eh_edges (tree stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = bb_for_stmt (stmt);
  edge_iterator ei;
  edge e;

  /* The edge aux fields are used as mark bits by mark_eh_edge; they
     must start out clear.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (TREE_CODE (stmt) == RESX_EXPR)
    {
      /* A RESX carries its region number as operand 0.  */
      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
	{
	  /* STMT is not in the EH table, so no EH edges may exist.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_EH)
	      {
		error ("BB %i can not throw but has EH edges", bb->index);
		return true;
	      }
	  return false;
	}
      if (!tree_could_throw_p (stmt))
	{
	  error ("BB %i last statement has incorrectly set region", bb->index);
	  return true;
	}
      is_resx = false;
    }

  /* Mark every edge a throw from STMT could take, then report any
     unmarked EH edge as superfluous.  Clear the marks as we go.  */
  foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
	{
	  error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
	  mark_eh_edge_found_error = true;
	  return true;
	}
      e->aux = NULL;
    }
  return mark_eh_edge_found_error;
}
1829
1830 \f
/* Return true if the expr can trap, as in dereferencing an invalid pointer
   location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
   This routine expects only GIMPLE lhs or rhs input.  */

bool
tree_could_trap_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  tree t, base;

  /* For arithmetic and comparisons, decide up front whether NaNs,
     signaling NaNs, or trapping signed overflow must be honored,
     based on the expression's type and the current flags.  */
  if (TREE_CODE_CLASS (code) == tcc_comparison
      || TREE_CODE_CLASS (code) == tcc_unary
      || TREE_CODE_CLASS (code) == tcc_binary)
    {
      t = TREE_TYPE (expr);
      fp_operation = FLOAT_TYPE_P (t);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
	honor_trapv = true;
    }

 restart:
  switch (code)
    {
    case TARGET_MEM_REF:
      /* For TARGET_MEM_REFs use the information based on the original
	 reference.  */
      expr = TMR_ORIGINAL (expr);
      code = TREE_CODE (expr);
      goto restart;

    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      /* These references trap iff the object they wrap does.  */
      expr = TREE_OPERAND (expr, 0);
      code = TREE_CODE (expr);
      goto restart;

    case ARRAY_RANGE_REF:
      /* Traps if the base traps, or if the range is not provably
	 within the array bounds (unless marked NOTRAP).  */
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !range_in_array_bounds_p (expr);

    case ARRAY_REF:
      /* Likewise for a single-element access.  */
      base = TREE_OPERAND (expr, 0);
      if (tree_could_trap_p (base))
	return true;

      if (TREE_THIS_NOTRAP (expr))
	return false;

      return !in_array_bounds_p (expr);

    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      /* A dereference traps unless annotated as known-valid.  */
      return !TREE_THIS_NOTRAP (expr);

    case ASM_EXPR:
      return TREE_THIS_VOLATILE (expr);

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division traps on signaling NaNs, trapping overflow, trapping
	 FP math, or a divisor not known to be a nonzero constant.  */
      if (honor_snans || honor_trapv)
	return true;
      if (fp_operation)
	return flag_trapping_math;
      t = TREE_OPERAND (expr, 1);
      if (!TREE_CONSTANT (t) || integer_zerop (t))
	return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* These comparisons trap only on signaling NaNs.  */
      return honor_snans;

    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
	return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      if (honor_trapv)
	return true;
      return false;

    case CALL_EXPR:
      t = get_callee_fndecl (expr);
      /* Assume that calls to weak functions may trap.  */
      if (!t || !DECL_P (t) || DECL_WEAK (t))
	return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
	return true;
      return false;
    }
}
1983
1984 bool
1985 tree_could_throw_p (tree t)
1986 {
1987 if (!flag_exceptions)
1988 return false;
1989 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1990 {
1991 if (flag_non_call_exceptions
1992 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
1993 return true;
1994 t = GIMPLE_STMT_OPERAND (t, 1);
1995 }
1996
1997 if (TREE_CODE (t) == WITH_SIZE_EXPR)
1998 t = TREE_OPERAND (t, 0);
1999 if (TREE_CODE (t) == CALL_EXPR)
2000 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2001 if (flag_non_call_exceptions)
2002 return tree_could_trap_p (t);
2003 return false;
2004 }
2005
2006 bool
2007 tree_can_throw_internal (tree stmt)
2008 {
2009 int region_nr;
2010 bool is_resx = false;
2011
2012 if (TREE_CODE (stmt) == RESX_EXPR)
2013 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2014 else
2015 region_nr = lookup_stmt_eh_region (stmt);
2016 if (region_nr < 0)
2017 return false;
2018 return can_throw_internal_1 (region_nr, is_resx);
2019 }
2020
2021 bool
2022 tree_can_throw_external (tree stmt)
2023 {
2024 int region_nr;
2025 bool is_resx = false;
2026
2027 if (TREE_CODE (stmt) == RESX_EXPR)
2028 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2029 else
2030 region_nr = lookup_stmt_eh_region (stmt);
2031 if (region_nr < 0)
2032 return tree_could_throw_p (stmt);
2033 else
2034 return can_throw_external_1 (region_nr, is_resx);
2035 }
2036
/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
   OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
   in the table if it should be in there.  Return TRUE if a replacement was
   done that may require an EH edge purge.  */
2041
2042 bool
2043 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2044 {
2045 int region_nr = lookup_stmt_eh_region (old_stmt);
2046
2047 if (region_nr >= 0)
2048 {
2049 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2050
2051 if (new_stmt == old_stmt && new_stmt_could_throw)
2052 return false;
2053
2054 remove_stmt_from_eh_region (old_stmt);
2055 if (new_stmt_could_throw)
2056 {
2057 add_stmt_to_eh_region (new_stmt, region_nr);
2058 return false;
2059 }
2060 else
2061 return true;
2062 }
2063
2064 return false;
2065 }