1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "flags.h"
28 #include "function.h"
29 #include "except.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-inline.h"
33 #include "tree-iterator.h"
34 #include "tree-pass.h"
35 #include "timevar.h"
36 #include "langhooks.h"
37 #include "ggc.h"
38 #include "toplev.h"
39 #include "pointer-set.h"
40
41 \f
42 /* Nonzero if we are using EH to handle cleanups. */
43 static int using_eh_for_cleanups_p = 0;
44
45 void
46 using_eh_for_cleanups (void)
47 {
48 using_eh_for_cleanups_p = 1;
49 }
50 \f
51 /* Misc functions used in this file. */
52
53 /* Compare and hash for any structure which begins with a canonical
54 pointer. Assumes all pointers are interchangeable, which is sort
55 of already assumed by gcc elsewhere IIRC. */
56
57 static int
58 struct_ptr_eq (const void *a, const void *b)
59 {
60 const void * const * x = (const void * const *) a;
61 const void * const * y = (const void * const *) b;
62 return *x == *y;
63 }
64
65 static hashval_t
66 struct_ptr_hash (const void *a)
67 {
68 const void * const * x = (const void * const *) a;
69 return (size_t)*x >> 4;
70 }
71
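/* Any structure whose first field is the canonical pointer can be hashed
   and compared with these two helpers.  struct throw_stmt_node and
   struct finally_tree_node below both use that layout, which is why a
   stack-allocated node with only the leading pointer filled in is enough
   to probe the corresponding hash table (see remove_stmt_from_eh_region_fn
   and outside_finally_tree).  */
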
72 \f
73 /* Remember and lookup EH region data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
76
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
80
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
84
85 static void
86 record_stmt_eh_region (struct eh_region *region, tree t)
87 {
88 if (!region)
89 return;
90
91 add_stmt_to_eh_region (t, get_eh_region_number (region));
92 }
93
94 void
95 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
96 {
97 struct throw_stmt_node *n;
98 void **slot;
99
100 gcc_assert (num >= 0);
101 gcc_assert (TREE_CODE (t) != RESX_EXPR);
102
103 n = GGC_NEW (struct throw_stmt_node);
104 n->stmt = t;
105 n->region_nr = num;
106
107 if (!get_eh_throw_stmt_table (ifun))
108 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
109 struct_ptr_eq,
110 ggc_free));
111
112 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
113 gcc_assert (!*slot);
114 *slot = n;
115 }
116
117 void
118 add_stmt_to_eh_region (tree t, int num)
119 {
120 add_stmt_to_eh_region_fn (cfun, t, num);
121 }
122
123 bool
124 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
125 {
126 struct throw_stmt_node dummy;
127 void **slot;
128
129 if (!get_eh_throw_stmt_table (ifun))
130 return false;
131
132 dummy.stmt = t;
133 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
134 NO_INSERT);
135 if (slot)
136 {
137 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
138 return true;
139 }
140 else
141 return false;
142 }
143
144 bool
145 remove_stmt_from_eh_region (tree t)
146 {
147 return remove_stmt_from_eh_region_fn (cfun, t);
148 }
149
150 int
151 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
152 {
153 struct throw_stmt_node *p, n;
154
155 if (!get_eh_throw_stmt_table (ifun))
156 return -2;
157
158 n.stmt = t;
159 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
160 &n);
161
162 return (p ? p->region_nr : -1);
163 }
164
165 int
166 lookup_stmt_eh_region (tree t)
167 {
168 /* We can get called from initialized data when -fnon-call-exceptions
169 is on; prevent crash. */
170 if (!cfun)
171 return -1;
172 return lookup_stmt_eh_region_fn (cfun, t);
173 }
174
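/* A typical caller only needs to know whether the statement has an EH
   region at all; schematically:

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)
       ... stmt can throw into region RN ...

   where -1 means the statement is not in the throw-stmt table and -2
   means the function has no such table at all.  */
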
175 \f
176 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
177 nodes and LABEL_DECL nodes. We will use this during the second phase to
178 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
179
180 struct finally_tree_node
181 {
182 tree child, parent;
183 };
184
185 /* Note that this table is *not* marked GTY. It is short-lived. */
186 static htab_t finally_tree;
187
188 static void
189 record_in_finally_tree (tree child, tree parent)
190 {
191 struct finally_tree_node *n;
192 void **slot;
193
194 n = XNEW (struct finally_tree_node);
195 n->child = child;
196 n->parent = parent;
197
198 slot = htab_find_slot (finally_tree, n, INSERT);
199 gcc_assert (!*slot);
200 *slot = n;
201 }
202
203 static void
204 collect_finally_tree (tree t, tree region)
205 {
206 tailrecurse:
207 switch (TREE_CODE (t))
208 {
209 case LABEL_EXPR:
210 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
211 break;
212
213 case TRY_FINALLY_EXPR:
214 record_in_finally_tree (t, region);
215 collect_finally_tree (TREE_OPERAND (t, 0), t);
216 t = TREE_OPERAND (t, 1);
217 goto tailrecurse;
218
219 case TRY_CATCH_EXPR:
220 collect_finally_tree (TREE_OPERAND (t, 0), region);
221 t = TREE_OPERAND (t, 1);
222 goto tailrecurse;
223
224 case CATCH_EXPR:
225 t = CATCH_BODY (t);
226 goto tailrecurse;
227
228 case EH_FILTER_EXPR:
229 t = EH_FILTER_FAILURE (t);
230 goto tailrecurse;
231
232 case STATEMENT_LIST:
233 {
234 tree_stmt_iterator i;
235 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
236 collect_finally_tree (tsi_stmt (i), region);
237 }
238 break;
239
240 default:
241 /* A type, a decl, or some kind of statement that we're not
242 interested in. Don't walk them. */
243 break;
244 }
245 }
246
247 /* Use the finally tree to determine if a jump from START to TARGET
248 would leave the try_finally node that START lives in. */
249
250 static bool
251 outside_finally_tree (tree start, tree target)
252 {
253 struct finally_tree_node n, *p;
254
255 do
256 {
257 n.child = start;
258 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
259 if (!p)
260 return true;
261 start = p->parent;
262 }
263 while (start != target);
264
265 return false;
266 }
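
/* For instance, given (schematically)

     try { try { ... goto done; ... } finally { A; } } finally { B; }
     done:;

   the label "done" is recorded with a null parent, so walking up from it
   never reaches either TRY_FINALLY_EXPR and the goto is outside both;
   a goto whose label sits inside the inner try body stops at that
   TRY_FINALLY_EXPR node and is therefore not outside it.  */
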
267 \f
268 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
269 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
270 The eh region creation is straightforward, but frobbing all the gotos
271 and such into shape isn't. */
272
273 /* State of the world while lowering. */
274
275 struct leh_state
276 {
277 /* What's "current" while constructing the eh region tree. These
278 correspond to variables of the same name in cfun->eh, which we
279 don't have easy access to. */
280 struct eh_region *cur_region;
281 struct eh_region *prev_try;
282
283 /* Processing of TRY_FINALLY requires a bit more state. This is
284 split out into a separate structure so that we don't have to
285 copy so much when processing other nodes. */
286 struct leh_tf_state *tf;
287 };
288
289 struct leh_tf_state
290 {
291 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
292 is the original TRY_FINALLY_EXPR. We need to retain this so that
293 outside_finally_tree can reliably reference the tree used in the
294 collect_finally_tree data structures. */
295 tree try_finally_expr;
296 tree *top_p;
297
298 /* The state outside this try_finally node. */
299 struct leh_state *outer;
300
301 /* The exception region created for it. */
302 struct eh_region *region;
303
304 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
305 that are seen to escape this TRY_FINALLY_EXPR node. */
306 struct goto_queue_node {
307 tree stmt;
308 tree repl_stmt;
309 tree cont_stmt;
310 int index;
311 } *goto_queue;
312 size_t goto_queue_size;
313 size_t goto_queue_active;
314
315 /* Pointer map to help in searching goto_queue when it is large. */
316 struct pointer_map_t *goto_queue_map;
317
318 /* The set of unique labels seen as entries in the goto queue. */
319 VEC(tree,heap) *dest_array;
320
321 /* A label to be added at the end of the completed transformed
322 sequence. It will be set if may_fallthru was true *at one time*,
323 though subsequent transformations may have cleared that flag. */
324 tree fallthru_label;
325
326 /* A label that has been registered with except.c to be the
327 landing pad for this try block. */
328 tree eh_label;
329
330 /* True if it is possible to fall out the bottom of the try block.
331 Cleared if the fallthru is converted to a goto. */
332 bool may_fallthru;
333
334 /* True if any entry in goto_queue is a RETURN_EXPR. */
335 bool may_return;
336
337 /* True if the finally block can receive an exception edge.
338 Cleared if the exception case is handled by code duplication. */
339 bool may_throw;
340 };
341
342 static void lower_eh_filter (struct leh_state *, tree *);
343 static void lower_eh_constructs_1 (struct leh_state *, tree *);
344
345 /* Search for STMT in the goto queue. Return the replacement,
346 or null if the statement isn't in the queue. */
347
348 #define LARGE_GOTO_QUEUE 20
349
350 static tree
351 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
352 {
353 unsigned int i;
354 void **slot;
355
356 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
357 {
358 for (i = 0; i < tf->goto_queue_active; i++)
359 if (tf->goto_queue[i].stmt == stmt)
360 return tf->goto_queue[i].repl_stmt;
361 return NULL;
362 }
363
364 /* If we have a large number of entries in the goto_queue, create a
365 pointer map and use that for searching. */
366
367 if (!tf->goto_queue_map)
368 {
369 tf->goto_queue_map = pointer_map_create ();
370 for (i = 0; i < tf->goto_queue_active; i++)
371 {
372 slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
373 gcc_assert (*slot == NULL);
374 *slot = (void *) &tf->goto_queue[i];
375 }
376 }
377
378 slot = pointer_map_contains (tf->goto_queue_map, stmt);
379 if (slot != NULL)
380 return (((struct goto_queue_node *) *slot)->repl_stmt);
381
382 return NULL;
383 }
384
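/* The queue is scanned linearly while it has fewer than LARGE_GOTO_QUEUE
   entries; once it grows past that, a pointer map keyed by the original
   statement is built lazily on the first lookup and reused for the rest
   of replace_goto_queue, so each subsequent search is a single hash probe
   rather than a walk over the whole queue.  */
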
385 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
386 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
387 then we can just splat it in, otherwise we add the new stmts immediately
388 after the COND_EXPR and redirect. */
389
390 static void
391 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
392 tree_stmt_iterator *tsi)
393 {
394 tree new, one, label;
395
396 new = find_goto_replacement (tf, *tp);
397 if (!new)
398 return;
399
400 one = expr_only (new);
401 if (one && TREE_CODE (one) == GOTO_EXPR)
402 {
403 *tp = one;
404 return;
405 }
406
407 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
408 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
409
410 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
411 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
412 }
413
414 /* The real work of replace_goto_queue. Returns with TSI updated to
415 point to the next statement. */
416
417 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
418
419 static void
420 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
421 {
422 switch (TREE_CODE (t))
423 {
424 case GOTO_EXPR:
425 case RETURN_EXPR:
426 t = find_goto_replacement (tf, t);
427 if (t)
428 {
429 tsi_link_before (tsi, t, TSI_SAME_STMT);
430 tsi_delink (tsi);
431 return;
432 }
433 break;
434
435 case COND_EXPR:
436 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
437 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
438 break;
439
440 case TRY_FINALLY_EXPR:
441 case TRY_CATCH_EXPR:
442 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
443 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
444 break;
445 case CATCH_EXPR:
446 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
447 break;
448 case EH_FILTER_EXPR:
449 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
450 break;
451
452 case STATEMENT_LIST:
453 gcc_unreachable ();
454
455 default:
456 /* These won't have gotos in them. */
457 break;
458 }
459
460 tsi_next (tsi);
461 }
462
463 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
464
465 static void
466 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
467 {
468 tree_stmt_iterator i = tsi_start (t);
469 while (!tsi_end_p (i))
470 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
471 }
472
473 /* Replace all goto queue members. */
474
475 static void
476 replace_goto_queue (struct leh_tf_state *tf)
477 {
478 if (tf->goto_queue_active == 0)
479 return;
480 replace_goto_queue_stmt_list (*tf->top_p, tf);
481 }
482
483 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
484 node, and if so record that fact in the goto queue associated with that
485 try_finally node. */
486
487 static void
488 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
489 {
490 struct leh_tf_state *tf = state->tf;
491 struct goto_queue_node *q;
492 size_t active, size;
493 int index;
494
495 if (!tf)
496 return;
497
498 switch (TREE_CODE (stmt))
499 {
500 case GOTO_EXPR:
501 {
502 tree lab = GOTO_DESTINATION (stmt);
503
504 /* Computed and non-local gotos do not get processed. Given
505 their nature we can neither tell whether we've escaped the
506 finally block nor redirect them if we knew. */
507 if (TREE_CODE (lab) != LABEL_DECL)
508 return;
509
510 /* No need to record gotos that don't leave the try block. */
511 if (! outside_finally_tree (lab, tf->try_finally_expr))
512 return;
513
514 if (! tf->dest_array)
515 {
516 tf->dest_array = VEC_alloc (tree, heap, 10);
517 VEC_quick_push (tree, tf->dest_array, lab);
518 index = 0;
519 }
520 else
521 {
522 int n = VEC_length (tree, tf->dest_array);
523 for (index = 0; index < n; ++index)
524 if (VEC_index (tree, tf->dest_array, index) == lab)
525 break;
526 if (index == n)
527 VEC_safe_push (tree, heap, tf->dest_array, lab);
528 }
529 }
530 break;
531
532 case RETURN_EXPR:
533 tf->may_return = true;
534 index = -1;
535 break;
536
537 default:
538 gcc_unreachable ();
539 }
540
541 gcc_assert (!tf->goto_queue_map);
542
543 active = tf->goto_queue_active;
544 size = tf->goto_queue_size;
545 if (active >= size)
546 {
547 size = (size ? size * 2 : 32);
548 tf->goto_queue_size = size;
549 tf->goto_queue
550 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
551 }
552
553 q = &tf->goto_queue[active];
554 tf->goto_queue_active = active + 1;
555
556 memset (q, 0, sizeof (*q));
557 q->stmt = stmt;
558 q->index = index;
559 }
560
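/* Schematically, while lowering

     try { if (p) goto out; return x; } finally { cleanup; }
     out:;

   the queue ends up holding two entries: the goto with index 0 (the
   position of "out" in dest_array) and the return with index -1; the
   negative index is how the lower_try_finally_* routines below tell
   returns apart from gotos when redirecting them.  */
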
561 #ifdef ENABLE_CHECKING
562 /* We do not process SWITCH_EXPRs for now. As long as the original source
563 was in fact structured, and we've not yet done jump threading, then none
564 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
565
566 static void
567 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
568 {
569 struct leh_tf_state *tf = state->tf;
570 size_t i, n;
571 tree vec;
572
573 if (!tf)
574 return;
575
576 vec = SWITCH_LABELS (switch_expr);
577 n = TREE_VEC_LENGTH (vec);
578
579 for (i = 0; i < n; ++i)
580 {
581 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
582 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
583 }
584 }
585 #else
586 #define verify_norecord_switch_expr(state, switch_expr)
587 #endif
588
589 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
590 whatever is needed to finish the return. If MOD is non-null, insert it
591 before the new branch. RETURN_VALUE_P is a cache containing a temporary
592 variable to be used in manipulating the value returned from the function. */
593
594 static void
595 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
596 tree *return_value_p)
597 {
598 tree ret_expr = TREE_OPERAND (q->stmt, 0);
599 tree x;
600
601 if (ret_expr)
602 {
603 /* The nasty part about redirecting the return value is that the
604 return value itself is to be computed before the FINALLY block
605 is executed. e.g.
606
607 int x;
608 int foo (void)
609 {
610 x = 0;
611 try {
612 return x;
613 } finally {
614 x++;
615 }
616 }
617
618 should return 0, not 1. Arrange for this to happen by copying the
619 computed return value into a local temporary. This also
620 allows us to redirect multiple return statements through the
621 same destination block; whether this is a net win or not really
622 depends, I guess, but it does make generation of the switch in
623 lower_try_finally_switch easier. */
624
625 switch (TREE_CODE (ret_expr))
626 {
627 case RESULT_DECL:
628 if (!*return_value_p)
629 *return_value_p = ret_expr;
630 else
631 gcc_assert (*return_value_p == ret_expr);
632 q->cont_stmt = q->stmt;
633 break;
634
635 case GIMPLE_MODIFY_STMT:
636 {
637 tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
638 tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
639
640 if (!*return_value_p)
641 {
642 if (aggregate_value_p (TREE_TYPE (result),
643 TREE_TYPE (current_function_decl)))
644 /* If this function returns in memory, copy the argument
645 into the return slot now. Otherwise, we might need to
646 worry about magic return semantics, so we need to use a
647 temporary to hold the value until we're actually ready
648 to return. */
649 new = result;
650 else
651 new = create_tmp_var (TREE_TYPE (old), "rettmp");
652 *return_value_p = new;
653 }
654 else
655 new = *return_value_p;
656
657 x = build_gimple_modify_stmt (new, old);
658 append_to_statement_list (x, &q->repl_stmt);
659
660 if (new == result)
661 x = result;
662 else
663 x = build_gimple_modify_stmt (result, new);
664 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
665 }
666 break;
667 default:
668 gcc_unreachable ();
669 }
670 }
671 else
672 {
673 /* If we don't return a value, all return statements are the same. */
674 q->cont_stmt = q->stmt;
675 }
676
677 if (mod)
678 append_to_statement_list (mod, &q->repl_stmt);
679
680 x = build1 (GOTO_EXPR, void_type_node, finlab);
681 append_to_statement_list (x, &q->repl_stmt);
682 }
683
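/* For the example in the comment above, a queued "return x;" (whose
   operand is a GIMPLE_MODIFY_STMT of the RESULT_DECL) is roughly
   rewritten so that q->repl_stmt becomes

     rettmp = x;
     goto finlab;

   while q->cont_stmt keeps the real return (RESULT_DECL = rettmp;
   return), to be emitted once, after the finally block, by the caller.  */
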
684 /* Similar, but easier, for GOTO_EXPR. */
685
686 static void
687 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
688 {
689 tree x;
690
691 q->cont_stmt = q->stmt;
692 if (mod)
693 append_to_statement_list (mod, &q->repl_stmt);
694
695 x = build1 (GOTO_EXPR, void_type_node, finlab);
696 append_to_statement_list (x, &q->repl_stmt);
697 }
698
699 /* We want to transform
700 try { body; } catch { stuff; }
701 to
702 body; goto over; lab: stuff; over:
703
704 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
705 should be placed before the second operand, or NULL. OVER is
706 an existing label that should be put at the exit, or NULL. */
707
708 static void
709 frob_into_branch_around (tree *tp, tree lab, tree over)
710 {
711 tree x, op1;
712
713 op1 = TREE_OPERAND (*tp, 1);
714 *tp = TREE_OPERAND (*tp, 0);
715
716 if (block_may_fallthru (*tp))
717 {
718 if (!over)
719 over = create_artificial_label ();
720 x = build1 (GOTO_EXPR, void_type_node, over);
721 append_to_statement_list (x, tp);
722 }
723
724 if (lab)
725 {
726 x = build1 (LABEL_EXPR, void_type_node, lab);
727 append_to_statement_list (x, tp);
728 }
729
730 append_to_statement_list (op1, tp);
731
732 if (over)
733 {
734 x = build1 (LABEL_EXPR, void_type_node, over);
735 append_to_statement_list (x, tp);
736 }
737 }
738
739 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
740 Make sure to record all new labels found. */
741
742 static tree
743 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
744 {
745 tree region = NULL;
746
747 t = unsave_expr_now (t);
748
749 if (outer_state->tf)
750 region = outer_state->tf->try_finally_expr;
751 collect_finally_tree (t, region);
752
753 return t;
754 }
755
756 /* A subroutine of lower_try_finally. Create a fallthru label for
757 the given try_finally state. The only tricky bit here is that
758 we have to make sure to record the label in our outer context. */
759
760 static tree
761 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
762 {
763 tree label = tf->fallthru_label;
764 if (!label)
765 {
766 label = create_artificial_label ();
767 tf->fallthru_label = label;
768 if (tf->outer->tf)
769 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
770 }
771 return label;
772 }
773
774 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
775 returns non-null, then the language requires that the exception path out
776 of a try_finally be treated specially. To wit: the code within the
777 finally block may not itself throw an exception. We have two choices here.
778 First we can duplicate the finally block and wrap it in a must_not_throw
779 region. Second, we can generate code like
780
781 try {
782 finally_block;
783 } catch {
784 if (fintmp == eh_edge)
785 protect_cleanup_actions;
786 }
787
788 where "fintmp" is the temporary used in the switch statement generation
789 alternative considered below. For the nonce, we always choose the first
790 option.
791
792 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
793
794 static void
795 honor_protect_cleanup_actions (struct leh_state *outer_state,
796 struct leh_state *this_state,
797 struct leh_tf_state *tf)
798 {
799 tree protect_cleanup_actions, finally, x;
800 tree_stmt_iterator i;
801 bool finally_may_fallthru;
802
803 /* First check for nothing to do. */
804 if (lang_protect_cleanup_actions)
805 protect_cleanup_actions = lang_protect_cleanup_actions ();
806 else
807 protect_cleanup_actions = NULL;
808
809 finally = TREE_OPERAND (*tf->top_p, 1);
810
811 /* If the EH case of the finally block can fall through, this may be a
812 structure of the form
813 try {
814 try {
815 throw ...;
816 } cleanup {
817 try {
818 throw ...;
819 } catch (...) {
820 }
821 }
822 } catch (...) {
823 yyy;
824 }
825 E.g. with an inline destructor with an embedded try block. In this
826 case we must save the runtime EH data around the nested exception.
827
828 This complication means that any time the previous runtime data might
829 be used (via fallthru from the finally) we handle the eh case here,
830 whether or not protect_cleanup_actions is active. */
831
832 finally_may_fallthru = block_may_fallthru (finally);
833 if (!finally_may_fallthru && !protect_cleanup_actions)
834 return;
835
836 /* Duplicate the FINALLY block. Only need to do this for try-finally,
837 and not for cleanups. */
838 if (this_state)
839 finally = lower_try_finally_dup_block (finally, outer_state);
840
841 /* Resume execution after the exception. Adding this now lets
842 lower_eh_filter not add unnecessary gotos, as it is clear that
843 we never fallthru from this copy of the finally block. */
844 if (finally_may_fallthru)
845 {
846 tree save_eptr, save_filt;
847
848 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
849 save_filt = create_tmp_var (integer_type_node, "save_filt");
850
851 i = tsi_start (finally);
852 x = build0 (EXC_PTR_EXPR, ptr_type_node);
853 x = build_gimple_modify_stmt (save_eptr, x);
854 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
855
856 x = build0 (FILTER_EXPR, integer_type_node);
857 x = build_gimple_modify_stmt (save_filt, x);
858 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
859
860 i = tsi_last (finally);
861 x = build0 (EXC_PTR_EXPR, ptr_type_node);
862 x = build_gimple_modify_stmt (x, save_eptr);
863 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
864
865 x = build0 (FILTER_EXPR, integer_type_node);
866 x = build_gimple_modify_stmt (x, save_filt);
867 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
868
869 x = build_resx (get_eh_region_number (tf->region));
870 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
871 }
872
873 /* Wrap the block with protect_cleanup_actions as the action. */
874 if (protect_cleanup_actions)
875 {
876 x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
877 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
878 EH_FILTER_MUST_NOT_THROW (x) = 1;
879 finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
880 lower_eh_filter (outer_state, &finally);
881 }
882 else
883 lower_eh_constructs_1 (outer_state, &finally);
884
885 /* Hook this up to the end of the existing try block. If we
886 previously fell through the end, we'll have to branch around.
887 This means adding a new goto, and adding it to the queue. */
888
889 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
890
891 if (tf->may_fallthru)
892 {
893 x = lower_try_finally_fallthru_label (tf);
894 x = build1 (GOTO_EXPR, void_type_node, x);
895 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
896
897 if (this_state)
898 maybe_record_in_goto_queue (this_state, x);
899
900 tf->may_fallthru = false;
901 }
902
903 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
904 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
905 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
906
907 /* Having now been handled, EH isn't to be considered with
908 the rest of the outgoing edges. */
909 tf->may_throw = false;
910 }
911
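/* Roughly, for the exception path the code above rewrites

     try { body; } finally { cleanup; }

   into something like

     body;
     goto fallthru_label;		(only if the try could fall through)
   eh_label:
     save_eptr = <exception pointer>;
     save_filt = <filter value>;
     cleanup;
     <exception pointer> = save_eptr;
     <filter value> = save_filt;
     resx;

   with the eh_label block optionally wrapped in a must-not-throw
   EH_FILTER_EXPR when lang_protect_cleanup_actions is set.  The save and
   restore of EXC_PTR_EXPR / FILTER_EXPR only appear when the cleanup can
   fall through, since that is when a nested exception could clobber the
   runtime EH data.  */
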
912 /* A subroutine of lower_try_finally. We have determined that there is
913 no fallthru edge out of the finally block. This means that there is
914 no outgoing edge corresponding to any incoming edge. Restructure the
915 try_finally node for this special case. */
916
917 static void
918 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
919 {
920 tree x, finally, lab, return_val;
921 struct goto_queue_node *q, *qe;
922
923 if (tf->may_throw)
924 lab = tf->eh_label;
925 else
926 lab = create_artificial_label ();
927
928 finally = TREE_OPERAND (*tf->top_p, 1);
929 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
930
931 x = build1 (LABEL_EXPR, void_type_node, lab);
932 append_to_statement_list (x, tf->top_p);
933
934 return_val = NULL;
935 q = tf->goto_queue;
936 qe = q + tf->goto_queue_active;
937 for (; q < qe; ++q)
938 if (q->index < 0)
939 do_return_redirection (q, lab, NULL, &return_val);
940 else
941 do_goto_redirection (q, lab, NULL);
942
943 replace_goto_queue (tf);
944
945 lower_eh_constructs_1 (state, &finally);
946 append_to_statement_list (finally, tf->top_p);
947 }
948
949 /* A subroutine of lower_try_finally. We have determined that there is
950 exactly one destination of the finally block. Restructure the
951 try_finally node for this special case. */
952
953 static void
954 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
955 {
956 struct goto_queue_node *q, *qe;
957 tree x, finally, finally_label;
958
959 finally = TREE_OPERAND (*tf->top_p, 1);
960 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
961
962 lower_eh_constructs_1 (state, &finally);
963
964 if (tf->may_throw)
965 {
966 /* Only reachable via the exception edge. Add the given label to
967 the head of the FINALLY block. Append a RESX at the end. */
968
969 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
970 append_to_statement_list (x, tf->top_p);
971
972 append_to_statement_list (finally, tf->top_p);
973
974 x = build_resx (get_eh_region_number (tf->region));
975
976 append_to_statement_list (x, tf->top_p);
977
978 return;
979 }
980
981 if (tf->may_fallthru)
982 {
983 /* Only reachable via the fallthru edge. Do nothing but let
984 the two blocks run together; we'll fall out the bottom. */
985 append_to_statement_list (finally, tf->top_p);
986 return;
987 }
988
989 finally_label = create_artificial_label ();
990 x = build1 (LABEL_EXPR, void_type_node, finally_label);
991 append_to_statement_list (x, tf->top_p);
992
993 append_to_statement_list (finally, tf->top_p);
994
995 q = tf->goto_queue;
996 qe = q + tf->goto_queue_active;
997
998 if (tf->may_return)
999 {
1000 /* Reachable by return expressions only. Redirect them. */
1001 tree return_val = NULL;
1002 for (; q < qe; ++q)
1003 do_return_redirection (q, finally_label, NULL, &return_val);
1004 replace_goto_queue (tf);
1005 }
1006 else
1007 {
1008 /* Reachable by goto expressions only. Redirect them. */
1009 for (; q < qe; ++q)
1010 do_goto_redirection (q, finally_label, NULL);
1011 replace_goto_queue (tf);
1012
1013 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1014 {
1015 /* Reachable by goto to fallthru label only. Redirect it
1016 to the new label (already created, sadly), and do not
1017 emit the final branch out, or the fallthru label. */
1018 tf->fallthru_label = NULL;
1019 return;
1020 }
1021 }
1022
1023 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1024 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1025 }
1026
1027 /* A subroutine of lower_try_finally. There are multiple edges incoming
1028 and outgoing from the finally block. Implement this by duplicating the
1029 finally block for every destination. */
1030
1031 static void
1032 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1033 {
1034 tree finally, new_stmt;
1035 tree x;
1036
1037 finally = TREE_OPERAND (*tf->top_p, 1);
1038 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1039
1040 new_stmt = NULL_TREE;
1041
1042 if (tf->may_fallthru)
1043 {
1044 x = lower_try_finally_dup_block (finally, state);
1045 lower_eh_constructs_1 (state, &x);
1046 append_to_statement_list (x, &new_stmt);
1047
1048 x = lower_try_finally_fallthru_label (tf);
1049 x = build1 (GOTO_EXPR, void_type_node, x);
1050 append_to_statement_list (x, &new_stmt);
1051 }
1052
1053 if (tf->may_throw)
1054 {
1055 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1056 append_to_statement_list (x, &new_stmt);
1057
1058 x = lower_try_finally_dup_block (finally, state);
1059 lower_eh_constructs_1 (state, &x);
1060 append_to_statement_list (x, &new_stmt);
1061
1062 x = build_resx (get_eh_region_number (tf->region));
1063 append_to_statement_list (x, &new_stmt);
1064 }
1065
1066 if (tf->goto_queue)
1067 {
1068 struct goto_queue_node *q, *qe;
1069 tree return_val = NULL;
1070 int return_index, index;
1071 struct labels_s
1072 {
1073 struct goto_queue_node *q;
1074 tree label;
1075 } *labels;
1076
1077 return_index = VEC_length (tree, tf->dest_array);
1078 labels = XCNEWVEC (struct labels_s, return_index + 1);
1079
1080 q = tf->goto_queue;
1081 qe = q + tf->goto_queue_active;
1082 for (; q < qe; q++)
1083 {
1084 index = q->index < 0 ? return_index : q->index;
1085
1086 if (!labels[index].q)
1087 labels[index].q = q;
1088 }
1089
1090 for (index = 0; index < return_index + 1; index++)
1091 {
1092 tree lab;
1093
1094 q = labels[index].q;
1095 if (! q)
1096 continue;
1097
1098 lab = labels[index].label = create_artificial_label ();
1099
1100 if (index == return_index)
1101 do_return_redirection (q, lab, NULL, &return_val);
1102 else
1103 do_goto_redirection (q, lab, NULL);
1104
1105 x = build1 (LABEL_EXPR, void_type_node, lab);
1106 append_to_statement_list (x, &new_stmt);
1107
1108 x = lower_try_finally_dup_block (finally, state);
1109 lower_eh_constructs_1 (state, &x);
1110 append_to_statement_list (x, &new_stmt);
1111
1112 append_to_statement_list (q->cont_stmt, &new_stmt);
1113 maybe_record_in_goto_queue (state, q->cont_stmt);
1114 }
1115
1116 for (q = tf->goto_queue; q < qe; q++)
1117 {
1118 tree lab;
1119
1120 index = q->index < 0 ? return_index : q->index;
1121
1122 if (labels[index].q == q)
1123 continue;
1124
1125 lab = labels[index].label;
1126
1127 if (index == return_index)
1128 do_return_redirection (q, lab, NULL, &return_val);
1129 else
1130 do_goto_redirection (q, lab, NULL);
1131 }
1132
1133 replace_goto_queue (tf);
1134 free (labels);
1135 }
1136
1137 /* The new statements must be linked in after running replace_goto_queue,
1138 so that the same goto statements are not processed twice. */
1139 append_to_statement_list (new_stmt, tf->top_p);
1140 }
1141
1142 /* A subroutine of lower_try_finally. There are multiple edges incoming
1143 and outgoing from the finally block. Implement this by instrumenting
1144 each incoming edge and creating a switch statement at the end of the
1145 finally block that branches to the appropriate destination. */
1146
1147 static void
1148 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1149 {
1150 struct goto_queue_node *q, *qe;
1151 tree return_val = NULL;
1152 tree finally, finally_tmp, finally_label;
1153 int return_index, eh_index, fallthru_index;
1154 int nlabels, ndests, j, last_case_index;
1155 tree case_label_vec, switch_stmt, last_case, switch_body;
1156 tree x;
1157
1158 /* Mash the TRY block to the head of the chain. */
1159 finally = TREE_OPERAND (*tf->top_p, 1);
1160 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1161
1162 /* Lower the finally block itself. */
1163 lower_eh_constructs_1 (state, &finally);
1164
1165 /* Prepare for switch statement generation. */
1166 nlabels = VEC_length (tree, tf->dest_array);
1167 return_index = nlabels;
1168 eh_index = return_index + tf->may_return;
1169 fallthru_index = eh_index + tf->may_throw;
1170 ndests = fallthru_index + tf->may_fallthru;
1171
1172 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1173 finally_label = create_artificial_label ();
1174
1175 case_label_vec = make_tree_vec (ndests);
1176 switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
1177 NULL_TREE, case_label_vec);
1178 switch_body = NULL;
1179 last_case = NULL;
1180 last_case_index = 0;
1181
1182 /* Begin inserting code for getting to the finally block. Things
1183 are done in this order to correspond to the sequence the code is
1184 laid out. */
1185
1186 if (tf->may_fallthru)
1187 {
1188 x = build_gimple_modify_stmt (finally_tmp,
1189 build_int_cst (integer_type_node,
1190 fallthru_index));
1191 append_to_statement_list (x, tf->top_p);
1192
1193 if (tf->may_throw)
1194 {
1195 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1196 append_to_statement_list (x, tf->top_p);
1197 }
1198
1199
1200 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1201 build_int_cst (NULL_TREE, fallthru_index), NULL,
1202 create_artificial_label ());
1203 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1204 last_case_index++;
1205
1206 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1207 append_to_statement_list (x, &switch_body);
1208
1209 x = lower_try_finally_fallthru_label (tf);
1210 x = build1 (GOTO_EXPR, void_type_node, x);
1211 append_to_statement_list (x, &switch_body);
1212 }
1213
1214 if (tf->may_throw)
1215 {
1216 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1217 append_to_statement_list (x, tf->top_p);
1218
1219 x = build_gimple_modify_stmt (finally_tmp,
1220 build_int_cst (integer_type_node,
1221 eh_index));
1222 append_to_statement_list (x, tf->top_p);
1223
1224 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1225 build_int_cst (NULL_TREE, eh_index), NULL,
1226 create_artificial_label ());
1227 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1228 last_case_index++;
1229
1230 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1231 append_to_statement_list (x, &switch_body);
1232 x = build_resx (get_eh_region_number (tf->region));
1233 append_to_statement_list (x, &switch_body);
1234 }
1235
1236 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1237 append_to_statement_list (x, tf->top_p);
1238
1239 append_to_statement_list (finally, tf->top_p);
1240
1241 /* Redirect each incoming goto edge. */
1242 q = tf->goto_queue;
1243 qe = q + tf->goto_queue_active;
1244 j = last_case_index + tf->may_return;
1245 for (; q < qe; ++q)
1246 {
1247 tree mod;
1248 int switch_id, case_index;
1249
1250 if (q->index < 0)
1251 {
1252 mod = build_gimple_modify_stmt (finally_tmp,
1253 build_int_cst (integer_type_node,
1254 return_index));
1255 do_return_redirection (q, finally_label, mod, &return_val);
1256 switch_id = return_index;
1257 }
1258 else
1259 {
1260 mod = build_gimple_modify_stmt (finally_tmp,
1261 build_int_cst (integer_type_node,
1262 q->index));
1263 do_goto_redirection (q, finally_label, mod);
1264 switch_id = q->index;
1265 }
1266
1267 case_index = j + q->index;
1268 if (!TREE_VEC_ELT (case_label_vec, case_index))
1269 TREE_VEC_ELT (case_label_vec, case_index)
1270 = build3 (CASE_LABEL_EXPR, void_type_node,
1271 build_int_cst (NULL_TREE, switch_id), NULL,
1272 /* We store the cont_stmt in the
1273 CASE_LABEL, so that we can recover it
1274 in the loop below. We don't create
1275 the new label while walking the
1276 goto_queue because pointers don't
1277 offer a stable order. */
1278 q->cont_stmt);
1279 }
1280 for (j = last_case_index; j < last_case_index + nlabels; j++)
1281 {
1282 tree label;
1283 tree cont_stmt;
1284
1285 last_case = TREE_VEC_ELT (case_label_vec, j);
1286
1287 gcc_assert (last_case);
1288
1289 cont_stmt = CASE_LABEL (last_case);
1290
1291 label = create_artificial_label ();
1292 CASE_LABEL (last_case) = label;
1293
1294 x = build1 (LABEL_EXPR, void_type_node, label);
1295 append_to_statement_list (x, &switch_body);
1296 append_to_statement_list (cont_stmt, &switch_body);
1297 maybe_record_in_goto_queue (state, cont_stmt);
1298 }
1299 replace_goto_queue (tf);
1300
1301 /* Make sure that the last case is the default label, as one is required.
1302 Then sort the labels, which is also required in GIMPLE. */
1303 CASE_LOW (last_case) = NULL;
1304 sort_case_labels (case_label_vec);
1305
1306 /* The switch statement must be linked in after running replace_goto_queue,
1307 so that the same goto statements are not processed twice. */
1308 append_to_statement_list (switch_stmt, tf->top_p);
1309 append_to_statement_list (switch_body, tf->top_p);
1310 }
1311
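/* Schematically, for

     try { body; } finally { cleanup; }

   with a fallthru edge, an EH edge, and one escaping goto, the result of
   the transformation above is approximately

     body;
     finally_tmp = <fallthru_index>; goto finally_label;
   eh_label:
     finally_tmp = <eh_index>;
     ... each escaping goto or return is rewritten to set finally_tmp
	 and jump to finally_label ...
   finally_label:
     cleanup;
     switch (finally_tmp)
       {
       case <fallthru_index>: goto fallthru_label;
       case <eh_index>: resx;
       case <goto_index>: goto original_goto_destination;
       }

   where one of the cases is then turned into the required default label
   and the labels are sorted, as GIMPLE demands.  */
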
1312 /* Decide whether or not we are going to duplicate the finally block.
1313 There are several considerations.
1314
1315 First, if this is Java, then the finally block contains code
1316 written by the user. It has line numbers associated with it,
1317 so duplicating the block means it's difficult to set a breakpoint.
1318 Since controlling code generation via -g is verboten, we simply
1319 never duplicate code without optimization.
1320
1321 Second, we'd like to prevent egregious code growth. One way to
1322 do this is to estimate the size of the finally block, multiply
1323 that by the number of copies we'd need to make, and compare against
1324 the estimate of the size of the switch machinery we'd have to add. */
1325
1326 static bool
1327 decide_copy_try_finally (int ndests, tree finally)
1328 {
1329 int f_estimate, sw_estimate;
1330
1331 if (!optimize)
1332 return false;
1333
1334 /* Finally estimate N times, plus N gotos. */
1335 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1336 f_estimate = (f_estimate + 1) * ndests;
1337
1338 /* Switch statement (cost 10), N variable assignments, N gotos. */
1339 sw_estimate = 10 + 2 * ndests;
1340
1341 /* Optimize for size clearly wants our best guess. */
1342 if (optimize_size)
1343 return f_estimate < sw_estimate;
1344
1345 /* ??? These numbers are completely made up so far. */
1346 if (optimize > 1)
1347 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1348 else
1349 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1350 }
1351
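/* As a rough worked example of the heuristic above: a cleanup estimated
   at 15 instructions with 3 reachable destinations gives
   f_estimate = (15 + 1) * 3 = 48 and sw_estimate = 10 + 2 * 3 = 16, so
   with -Os the switch form is used (48 >= 16), while at -O2 the finally
   block is still copied because 48 < 100.  */
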
1352 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR node
1353 to a sequence of labels and blocks, plus the exception region trees
1354 that record all the magic. This is complicated by the need to
1355 arrange for the FINALLY block to be executed on all exits. */
1356
1357 static void
1358 lower_try_finally (struct leh_state *state, tree *tp)
1359 {
1360 struct leh_tf_state this_tf;
1361 struct leh_state this_state;
1362 int ndests;
1363
1364 /* Process the try block. */
1365
1366 memset (&this_tf, 0, sizeof (this_tf));
1367 this_tf.try_finally_expr = *tp;
1368 this_tf.top_p = tp;
1369 this_tf.outer = state;
1370 if (using_eh_for_cleanups_p)
1371 this_tf.region
1372 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1373 else
1374 this_tf.region = NULL;
1375
1376 this_state.cur_region = this_tf.region;
1377 this_state.prev_try = state->prev_try;
1378 this_state.tf = &this_tf;
1379
1380 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1381
1382 /* Determine if the try block is escaped through the bottom. */
1383 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1384
1385 /* Determine if any exceptions are possible within the try block. */
1386 if (using_eh_for_cleanups_p)
1387 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1388 if (this_tf.may_throw)
1389 {
1390 this_tf.eh_label = create_artificial_label ();
1391 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1392 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1393 }
1394
1395 /* Determine how many edges (still) reach the finally block. Or rather,
1396 how many destinations are reached by the finally block. Use this to
1397 determine how we process the finally block itself. */
1398
1399 ndests = VEC_length (tree, this_tf.dest_array);
1400 ndests += this_tf.may_fallthru;
1401 ndests += this_tf.may_return;
1402 ndests += this_tf.may_throw;
1403
1404 /* If the FINALLY block is not reachable, dike it out. */
1405 if (ndests == 0)
1406 *tp = TREE_OPERAND (*tp, 0);
1407
1408 /* If the finally block doesn't fall through, then any destination
1409 we might try to impose there isn't reached either. There may be
1410 some minor amount of cleanup and redirection still needed. */
1411 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1412 lower_try_finally_nofallthru (state, &this_tf);
1413
1414 /* We can easily special-case redirection to a single destination. */
1415 else if (ndests == 1)
1416 lower_try_finally_onedest (state, &this_tf);
1417
1418 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1419 lower_try_finally_copy (state, &this_tf);
1420 else
1421 lower_try_finally_switch (state, &this_tf);
1422
1423 /* If someone requested we add a label at the end of the transformed
1424 block, do so. */
1425 if (this_tf.fallthru_label)
1426 {
1427 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1428 append_to_statement_list (x, tp);
1429 }
1430
1431 VEC_free (tree, heap, this_tf.dest_array);
1432 if (this_tf.goto_queue)
1433 free (this_tf.goto_queue);
1434 if (this_tf.goto_queue_map)
1435 pointer_map_destroy (this_tf.goto_queue_map);
1436 }
1437
1438 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1439 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1440 exception region trees that record all the magic. */
1441
1442 static void
1443 lower_catch (struct leh_state *state, tree *tp)
1444 {
1445 struct eh_region *try_region;
1446 struct leh_state this_state;
1447 tree_stmt_iterator i;
1448 tree out_label;
1449
1450 try_region = gen_eh_region_try (state->cur_region);
1451 this_state.cur_region = try_region;
1452 this_state.prev_try = try_region;
1453 this_state.tf = state->tf;
1454
1455 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1456
1457 if (!get_eh_region_may_contain_throw (try_region))
1458 {
1459 *tp = TREE_OPERAND (*tp, 0);
1460 return;
1461 }
1462
1463 out_label = NULL;
1464 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1465 {
1466 struct eh_region *catch_region;
1467 tree catch, x, eh_label;
1468
1469 catch = tsi_stmt (i);
1470 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1471
1472 this_state.cur_region = catch_region;
1473 this_state.prev_try = state->prev_try;
1474 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1475
1476 eh_label = create_artificial_label ();
1477 set_eh_region_tree_label (catch_region, eh_label);
1478
1479 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1480 tsi_link_before (&i, x, TSI_SAME_STMT);
1481
1482 if (block_may_fallthru (CATCH_BODY (catch)))
1483 {
1484 if (!out_label)
1485 out_label = create_artificial_label ();
1486
1487 x = build1 (GOTO_EXPR, void_type_node, out_label);
1488 append_to_statement_list (x, &CATCH_BODY (catch));
1489 }
1490
1491 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1492 tsi_delink (&i);
1493 }
1494
1495 frob_into_branch_around (tp, NULL, out_label);
1496 }
1497
1498 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with an
1499 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1500 region trees that record all the magic. */
1501
1502 static void
1503 lower_eh_filter (struct leh_state *state, tree *tp)
1504 {
1505 struct leh_state this_state;
1506 struct eh_region *this_region;
1507 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1508 tree eh_label;
1509
1510 if (EH_FILTER_MUST_NOT_THROW (inner))
1511 this_region = gen_eh_region_must_not_throw (state->cur_region);
1512 else
1513 this_region = gen_eh_region_allowed (state->cur_region,
1514 EH_FILTER_TYPES (inner));
1515 this_state = *state;
1516 this_state.cur_region = this_region;
1517 /* For a must-not-throw region, any cleanup regions inside it
1518 can't reach outer catch regions. */
1519 if (EH_FILTER_MUST_NOT_THROW (inner))
1520 this_state.prev_try = NULL;
1521
1522 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1523
1524 if (!get_eh_region_may_contain_throw (this_region))
1525 {
1526 *tp = TREE_OPERAND (*tp, 0);
1527 return;
1528 }
1529
1530 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1531 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1532
1533 eh_label = create_artificial_label ();
1534 set_eh_region_tree_label (this_region, eh_label);
1535
1536 frob_into_branch_around (tp, eh_label, NULL);
1537 }
1538
1539 /* Implement a cleanup expression. This is similar to try-finally,
1540 except that we only execute the cleanup block for exception edges. */
1541
1542 static void
1543 lower_cleanup (struct leh_state *state, tree *tp)
1544 {
1545 struct leh_state this_state;
1546 struct eh_region *this_region;
1547 struct leh_tf_state fake_tf;
1548
1549 /* If not using eh, then exception-only cleanups are no-ops. */
1550 if (!flag_exceptions)
1551 {
1552 *tp = TREE_OPERAND (*tp, 0);
1553 lower_eh_constructs_1 (state, tp);
1554 return;
1555 }
1556
1557 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1558 this_state = *state;
1559 this_state.cur_region = this_region;
1560
1561 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1562
1563 if (!get_eh_region_may_contain_throw (this_region))
1564 {
1565 *tp = TREE_OPERAND (*tp, 0);
1566 return;
1567 }
1568
1569 /* Build enough of a try-finally state so that we can reuse
1570 honor_protect_cleanup_actions. */
1571 memset (&fake_tf, 0, sizeof (fake_tf));
1572 fake_tf.top_p = tp;
1573 fake_tf.outer = state;
1574 fake_tf.region = this_region;
1575 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1576 fake_tf.may_throw = true;
1577
1578 fake_tf.eh_label = create_artificial_label ();
1579 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1580
1581 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1582
1583 if (fake_tf.may_throw)
1584 {
1585 /* In this case honor_protect_cleanup_actions had nothing to do,
1586 and we should process this normally. */
1587 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1588 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1589 }
1590 else
1591 {
1592 /* In this case honor_protect_cleanup_actions did nearly all of
1593 the work. All we have left is to append the fallthru_label. */
1594
1595 *tp = TREE_OPERAND (*tp, 0);
1596 if (fake_tf.fallthru_label)
1597 {
1598 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1599 append_to_statement_list (x, tp);
1600 }
1601 }
1602 }
1603
1604 /* Main loop for lowering eh constructs. */
1605
1606 static void
1607 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1608 {
1609 tree_stmt_iterator i;
1610 tree t = *tp;
1611
1612 switch (TREE_CODE (t))
1613 {
1614 case COND_EXPR:
1615 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1616 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1617 break;
1618
1619 case CALL_EXPR:
1620 /* Look for things that can throw exceptions, and record them. */
1621 if (state->cur_region && tree_could_throw_p (t))
1622 {
1623 record_stmt_eh_region (state->cur_region, t);
1624 note_eh_region_may_contain_throw (state->cur_region);
1625 }
1626 break;
1627
1628 case GIMPLE_MODIFY_STMT:
1629 /* Look for things that can throw exceptions, and record them. */
1630 if (state->cur_region && tree_could_throw_p (t))
1631 {
1632 record_stmt_eh_region (state->cur_region, t);
1633 note_eh_region_may_contain_throw (state->cur_region);
1634 }
1635 break;
1636
1637 case GOTO_EXPR:
1638 case RETURN_EXPR:
1639 maybe_record_in_goto_queue (state, t);
1640 break;
1641 case SWITCH_EXPR:
1642 verify_norecord_switch_expr (state, t);
1643 break;
1644
1645 case TRY_FINALLY_EXPR:
1646 lower_try_finally (state, tp);
1647 break;
1648
1649 case TRY_CATCH_EXPR:
1650 i = tsi_start (TREE_OPERAND (t, 1));
1651 switch (TREE_CODE (tsi_stmt (i)))
1652 {
1653 case CATCH_EXPR:
1654 lower_catch (state, tp);
1655 break;
1656 case EH_FILTER_EXPR:
1657 lower_eh_filter (state, tp);
1658 break;
1659 default:
1660 lower_cleanup (state, tp);
1661 break;
1662 }
1663 break;
1664
1665 case STATEMENT_LIST:
1666 for (i = tsi_start (t); !tsi_end_p (i); )
1667 {
1668 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1669 t = tsi_stmt (i);
1670 if (TREE_CODE (t) == STATEMENT_LIST)
1671 {
1672 tsi_link_before (&i, t, TSI_SAME_STMT);
1673 tsi_delink (&i);
1674 }
1675 else
1676 tsi_next (&i);
1677 }
1678 break;
1679
1680 default:
1681 /* A type, a decl, or some kind of statement that we're not
1682 interested in. Don't walk them. */
1683 break;
1684 }
1685 }
1686
1687 static unsigned int
1688 lower_eh_constructs (void)
1689 {
1690 struct leh_state null_state;
1691 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1692
1693 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1694
1695 collect_finally_tree (*tp, NULL);
1696
1697 memset (&null_state, 0, sizeof (null_state));
1698 lower_eh_constructs_1 (&null_state, tp);
1699
1700 htab_delete (finally_tree);
1701
1702 collect_eh_region_array ();
1703 return 0;
1704 }
1705
1706 struct tree_opt_pass pass_lower_eh =
1707 {
1708 "eh", /* name */
1709 NULL, /* gate */
1710 lower_eh_constructs, /* execute */
1711 NULL, /* sub */
1712 NULL, /* next */
1713 0, /* static_pass_number */
1714 TV_TREE_EH, /* tv_id */
1715 PROP_gimple_lcf, /* properties_required */
1716 PROP_gimple_leh, /* properties_provided */
1717 0, /* properties_destroyed */
1718 0, /* todo_flags_start */
1719 TODO_dump_func, /* todo_flags_finish */
1720 0 /* letter */
1721 };
1722
1723 \f
1724 /* Construct EH edges for STMT. */
1725
1726 static void
1727 make_eh_edge (struct eh_region *region, void *data)
1728 {
1729 tree stmt, lab;
1730 basic_block src, dst;
1731
1732 stmt = (tree) data;
1733 lab = get_eh_region_tree_label (region);
1734
1735 src = bb_for_stmt (stmt);
1736 dst = label_to_block (lab);
1737
1738 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1739 }
1740
1741 void
1742 make_eh_edges (tree stmt)
1743 {
1744 int region_nr;
1745 bool is_resx;
1746
1747 if (TREE_CODE (stmt) == RESX_EXPR)
1748 {
1749 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1750 is_resx = true;
1751 }
1752 else
1753 {
1754 region_nr = lookup_stmt_eh_region (stmt);
1755 if (region_nr < 0)
1756 return;
1757 is_resx = false;
1758 }
1759
1760 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1761 }
1762
1763 static bool mark_eh_edge_found_error;
1764
1765 /* Mark the edge that make_eh_edge would create for the given region by
1766 setting its aux field; output an error if something goes wrong. */
1767 static void
1768 mark_eh_edge (struct eh_region *region, void *data)
1769 {
1770 tree stmt, lab;
1771 basic_block src, dst;
1772 edge e;
1773
1774 stmt = (tree) data;
1775 lab = get_eh_region_tree_label (region);
1776
1777 src = bb_for_stmt (stmt);
1778 dst = label_to_block (lab);
1779
1780 e = find_edge (src, dst);
1781 if (!e)
1782 {
1783 error ("EH edge %i->%i is missing", src->index, dst->index);
1784 mark_eh_edge_found_error = true;
1785 }
1786 else if (!(e->flags & EDGE_EH))
1787 {
1788 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1789 mark_eh_edge_found_error = true;
1790 }
1791 else if (e->aux)
1792 {
1793 /* ??? This might not be a mistake. */
1794 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1795 mark_eh_edge_found_error = true;
1796 }
1797 else
1798 e->aux = (void *)1;
1799 }
1800
1801 /* Verify that the BB containing STMT as its last statement has exactly
1802 the edges that make_eh_edges would create. */
1803 bool
1804 verify_eh_edges (tree stmt)
1805 {
1806 int region_nr;
1807 bool is_resx;
1808 basic_block bb = bb_for_stmt (stmt);
1809 edge_iterator ei;
1810 edge e;
1811
1812 FOR_EACH_EDGE (e, ei, bb->succs)
1813 gcc_assert (!e->aux);
1814 mark_eh_edge_found_error = false;
1815 if (TREE_CODE (stmt) == RESX_EXPR)
1816 {
1817 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1818 is_resx = true;
1819 }
1820 else
1821 {
1822 region_nr = lookup_stmt_eh_region (stmt);
1823 if (region_nr < 0)
1824 {
1825 FOR_EACH_EDGE (e, ei, bb->succs)
1826 if (e->flags & EDGE_EH)
1827 {
1828 error ("BB %i can not throw but has EH edges", bb->index);
1829 return true;
1830 }
1831 return false;
1832 }
1833 if (!tree_could_throw_p (stmt))
1834 {
1835 error ("BB %i last statement has incorrectly set region", bb->index);
1836 return true;
1837 }
1838 is_resx = false;
1839 }
1840
1841 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1842 FOR_EACH_EDGE (e, ei, bb->succs)
1843 {
1844 if ((e->flags & EDGE_EH) && !e->aux)
1845 {
1846 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1847 mark_eh_edge_found_error = true;
1848 return true;
1849 }
1850 e->aux = NULL;
1851 }
1852 return mark_eh_edge_found_error;
1853 }
1854
1855 \f
1856 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1857 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1858 This routine expects only GIMPLE lhs or rhs input. */
1859
1860 bool
1861 tree_could_trap_p (tree expr)
1862 {
1863 enum tree_code code = TREE_CODE (expr);
1864 bool honor_nans = false;
1865 bool honor_snans = false;
1866 bool fp_operation = false;
1867 bool honor_trapv = false;
1868 tree t, base;
1869
1870 if (TREE_CODE_CLASS (code) == tcc_comparison
1871 || TREE_CODE_CLASS (code) == tcc_unary
1872 || TREE_CODE_CLASS (code) == tcc_binary)
1873 {
1874 t = TREE_TYPE (expr);
1875 fp_operation = FLOAT_TYPE_P (t);
1876 if (fp_operation)
1877 {
1878 honor_nans = flag_trapping_math && !flag_finite_math_only;
1879 honor_snans = flag_signaling_nans != 0;
1880 }
1881 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
1882 honor_trapv = true;
1883 }
1884
1885 restart:
1886 switch (code)
1887 {
1888 case TARGET_MEM_REF:
1889 /* For TARGET_MEM_REFs use the information based on the original
1890 reference. */
1891 expr = TMR_ORIGINAL (expr);
1892 code = TREE_CODE (expr);
1893 goto restart;
1894
1895 case COMPONENT_REF:
1896 case REALPART_EXPR:
1897 case IMAGPART_EXPR:
1898 case BIT_FIELD_REF:
1899 case VIEW_CONVERT_EXPR:
1900 case WITH_SIZE_EXPR:
1901 expr = TREE_OPERAND (expr, 0);
1902 code = TREE_CODE (expr);
1903 goto restart;
1904
1905 case ARRAY_RANGE_REF:
1906 base = TREE_OPERAND (expr, 0);
1907 if (tree_could_trap_p (base))
1908 return true;
1909
1910 if (TREE_THIS_NOTRAP (expr))
1911 return false;
1912
1913 return !range_in_array_bounds_p (expr);
1914
1915 case ARRAY_REF:
1916 base = TREE_OPERAND (expr, 0);
1917 if (tree_could_trap_p (base))
1918 return true;
1919
1920 if (TREE_THIS_NOTRAP (expr))
1921 return false;
1922
1923 return !in_array_bounds_p (expr);
1924
1925 case INDIRECT_REF:
1926 case ALIGN_INDIRECT_REF:
1927 case MISALIGNED_INDIRECT_REF:
1928 return !TREE_THIS_NOTRAP (expr);
1929
1930 case ASM_EXPR:
1931 return TREE_THIS_VOLATILE (expr);
1932
1933 case TRUNC_DIV_EXPR:
1934 case CEIL_DIV_EXPR:
1935 case FLOOR_DIV_EXPR:
1936 case ROUND_DIV_EXPR:
1937 case EXACT_DIV_EXPR:
1938 case CEIL_MOD_EXPR:
1939 case FLOOR_MOD_EXPR:
1940 case ROUND_MOD_EXPR:
1941 case TRUNC_MOD_EXPR:
1942 case RDIV_EXPR:
1943 if (honor_snans || honor_trapv)
1944 return true;
1945 if (fp_operation)
1946 return flag_trapping_math;
1947 t = TREE_OPERAND (expr, 1);
1948 if (!TREE_CONSTANT (t) || integer_zerop (t))
1949 return true;
1950 return false;
1951
1952 case LT_EXPR:
1953 case LE_EXPR:
1954 case GT_EXPR:
1955 case GE_EXPR:
1956 case LTGT_EXPR:
1957 /* Some floating point comparisons may trap. */
1958 return honor_nans;
1959
1960 case EQ_EXPR:
1961 case NE_EXPR:
1962 case UNORDERED_EXPR:
1963 case ORDERED_EXPR:
1964 case UNLT_EXPR:
1965 case UNLE_EXPR:
1966 case UNGT_EXPR:
1967 case UNGE_EXPR:
1968 case UNEQ_EXPR:
1969 return honor_snans;
1970
1971 case CONVERT_EXPR:
1972 case FIX_TRUNC_EXPR:
1973 /* Conversion of floating point might trap. */
1974 return honor_nans;
1975
1976 case NEGATE_EXPR:
1977 case ABS_EXPR:
1978 case CONJ_EXPR:
1979 /* These operations don't trap with floating point. */
1980 if (honor_trapv)
1981 return true;
1982 return false;
1983
1984 case PLUS_EXPR:
1985 case MINUS_EXPR:
1986 case MULT_EXPR:
1987 /* Any floating arithmetic may trap. */
1988 if (fp_operation && flag_trapping_math)
1989 return true;
1990 if (honor_trapv)
1991 return true;
1992 return false;
1993
1994 case CALL_EXPR:
1995 t = get_callee_fndecl (expr);
1996 /* Assume that calls to weak functions may trap. */
1997 if (!t || !DECL_P (t) || DECL_WEAK (t))
1998 return true;
1999 return false;
2000
2001 default:
2002 /* Any floating arithmetic may trap. */
2003 if (fp_operation && flag_trapping_math)
2004 return true;
2005 return false;
2006 }
2007 }
2008
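/* Roughly, for the cases handled above: *p traps unless TREE_THIS_NOTRAP
   is set on the dereference; a[i] traps when the index cannot be shown to
   be in bounds; x / y traps when y is not a known nonzero constant, when
   the operands are floating point and -ftrapping-math is in effect, or
   when signed overflow traps; the ordering comparisons on floats trap
   when NaNs are honored, while equality and the unordered variants trap
   only for signaling NaNs; and a call is assumed to trap unless its
   callee is a known, non-weak declaration.  */
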
2009 bool
2010 tree_could_throw_p (tree t)
2011 {
2012 if (!flag_exceptions)
2013 return false;
2014 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2015 {
2016 if (flag_non_call_exceptions
2017 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
2018 return true;
2019 t = GIMPLE_STMT_OPERAND (t, 1);
2020 }
2021
2022 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2023 t = TREE_OPERAND (t, 0);
2024 if (TREE_CODE (t) == CALL_EXPR)
2025 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2026 if (flag_non_call_exceptions)
2027 return tree_could_trap_p (t);
2028 return false;
2029 }
2030
2031 bool
2032 tree_can_throw_internal (tree stmt)
2033 {
2034 int region_nr;
2035 bool is_resx = false;
2036
2037 if (TREE_CODE (stmt) == RESX_EXPR)
2038 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2039 else
2040 region_nr = lookup_stmt_eh_region (stmt);
2041 if (region_nr < 0)
2042 return false;
2043 return can_throw_internal_1 (region_nr, is_resx);
2044 }
2045
2046 bool
2047 tree_can_throw_external (tree stmt)
2048 {
2049 int region_nr;
2050 bool is_resx = false;
2051
2052 if (TREE_CODE (stmt) == RESX_EXPR)
2053 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2054 else
2055 region_nr = lookup_stmt_eh_region (stmt);
2056 if (region_nr < 0)
2057 return tree_could_throw_p (stmt);
2058 else
2059 return can_throw_external_1 (region_nr, is_resx);
2060 }
2061
2062 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2063 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2064 in the table if it should be in there. Return TRUE if a replacement was
2065 done that may require an EH edge purge. */
2066
2067 bool
2068 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2069 {
2070 int region_nr = lookup_stmt_eh_region (old_stmt);
2071
2072 if (region_nr >= 0)
2073 {
2074 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2075
2076 if (new_stmt == old_stmt && new_stmt_could_throw)
2077 return false;
2078
2079 remove_stmt_from_eh_region (old_stmt);
2080 if (new_stmt_could_throw)
2081 {
2082 add_stmt_to_eh_region (new_stmt, region_nr);
2083 return false;
2084 }
2085 else
2086 return true;
2087 }
2088
2089 return false;
2090 }