re PR c++/15764 (no cleanup if temporary's dtor terminates with an exception)
[gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "flags.h"
28 #include "function.h"
29 #include "except.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-inline.h"
33 #include "tree-iterator.h"
34 #include "tree-pass.h"
35 #include "timevar.h"
36 #include "langhooks.h"
37 #include "ggc.h"
38 #include "toplev.h"
39 #include "pointer-set.h"
40
41 \f
42 /* Nonzero if we are using EH to handle cleanups. */
43 static int using_eh_for_cleanups_p = 0;
44
45 void
46 using_eh_for_cleanups (void)
47 {
48 using_eh_for_cleanups_p = 1;
49 }
50 \f
51 /* Misc functions used in this file. */
52
53 /* Compare and hash for any structure which begins with a canonical
54 pointer. Assumes all pointers are interchangeable, which is sort
55 of already assumed by gcc elsewhere IIRC. */
56
57 static int
58 struct_ptr_eq (const void *a, const void *b)
59 {
60 const void * const * x = (const void * const *) a;
61 const void * const * y = (const void * const *) b;
62 return *x == *y;
63 }
64
65 static hashval_t
66 struct_ptr_hash (const void *a)
67 {
68 const void * const * x = (const void * const *) a;
69 return (size_t)*x >> 4;
70 }
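/* For example, struct throw_stmt_node (used below) begins with its STMT
   pointer and struct finally_tree_node begins with its CHILD pointer, so
   both can be hashed and compared purely on that leading field.  */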
71
72 \f
73 /* Remember and lookup EH region data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
76
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
80
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
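/* Usage sketch: a statement that can throw is entered with
   add_stmt_to_eh_region (stmt, region_nr); later queries go through
   lookup_stmt_eh_region (stmt), which returns the region number, -1 when
   the statement is not in the table, or -2 when no table exists yet.  */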
84
85 static void
86 record_stmt_eh_region (struct eh_region *region, tree t)
87 {
88 if (!region)
89 return;
90
91 add_stmt_to_eh_region (t, get_eh_region_number (region));
92 }
93
94 void
95 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
96 {
97 struct throw_stmt_node *n;
98 void **slot;
99
100 gcc_assert (num >= 0);
101 gcc_assert (TREE_CODE (t) != RESX_EXPR);
102
103 n = GGC_NEW (struct throw_stmt_node);
104 n->stmt = t;
105 n->region_nr = num;
106
107 if (!get_eh_throw_stmt_table (ifun))
108 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
109 struct_ptr_eq,
110 ggc_free));
111
112 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
113 gcc_assert (!*slot);
114 *slot = n;
115 }
116
117 void
118 add_stmt_to_eh_region (tree t, int num)
119 {
120 add_stmt_to_eh_region_fn (cfun, t, num);
121 }
122
123 bool
124 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
125 {
126 struct throw_stmt_node dummy;
127 void **slot;
128
129 if (!get_eh_throw_stmt_table (ifun))
130 return false;
131
132 dummy.stmt = t;
133 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
134 NO_INSERT);
135 if (slot)
136 {
137 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
138 return true;
139 }
140 else
141 return false;
142 }
143
144 bool
145 remove_stmt_from_eh_region (tree t)
146 {
147 return remove_stmt_from_eh_region_fn (cfun, t);
148 }
149
150 int
151 lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
152 {
153 struct throw_stmt_node *p, n;
154
155 if (!get_eh_throw_stmt_table (ifun))
156 return -2;
157
158 /* The CONST_CAST is okay because we don't modify n.stmt throughout
159 its scope, or the scope of p. */
160 n.stmt = CONST_CAST_TREE (t);
161 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
162 &n);
163
164 return (p ? p->region_nr : -1);
165 }
166
167 int
168 lookup_stmt_eh_region (const_tree t)
169 {
170 /* We can get called from initialized data when -fnon-call-exceptions
171 is on; prevent crash. */
172 if (!cfun)
173 return -1;
174 return lookup_stmt_eh_region_fn (cfun, t);
175 }
176
177 \f
178 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
179 nodes and LABEL_DECL nodes. We will use this during the second phase to
180 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
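/* For instance, in

     try {                          (T1)
       lab:;
       try {                        (T2)
         ...
       } finally { ... }
     } finally { ... }

   the tree records lab -> T1 and T2 -> T1, so a goto to "lab" issued
   inside T2 is seen to leave T2: walking up from "lab" reaches T1 and
   then runs out of parents without ever meeting T2.  */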
181
182 struct finally_tree_node
183 {
184 tree child, parent;
185 };
186
187 /* Note that this table is *not* marked GTY. It is short-lived. */
188 static htab_t finally_tree;
189
190 static void
191 record_in_finally_tree (tree child, tree parent)
192 {
193 struct finally_tree_node *n;
194 void **slot;
195
196 n = XNEW (struct finally_tree_node);
197 n->child = child;
198 n->parent = parent;
199
200 slot = htab_find_slot (finally_tree, n, INSERT);
201 gcc_assert (!*slot);
202 *slot = n;
203 }
204
205 static void
206 collect_finally_tree (tree t, tree region)
207 {
208 tailrecurse:
209 switch (TREE_CODE (t))
210 {
211 case LABEL_EXPR:
212 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
213 break;
214
215 case TRY_FINALLY_EXPR:
216 record_in_finally_tree (t, region);
217 collect_finally_tree (TREE_OPERAND (t, 0), t);
218 t = TREE_OPERAND (t, 1);
219 goto tailrecurse;
220
221 case TRY_CATCH_EXPR:
222 collect_finally_tree (TREE_OPERAND (t, 0), region);
223 t = TREE_OPERAND (t, 1);
224 goto tailrecurse;
225
226 case CATCH_EXPR:
227 t = CATCH_BODY (t);
228 goto tailrecurse;
229
230 case EH_FILTER_EXPR:
231 t = EH_FILTER_FAILURE (t);
232 goto tailrecurse;
233
234 case STATEMENT_LIST:
235 {
236 tree_stmt_iterator i;
237 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
238 collect_finally_tree (tsi_stmt (i), region);
239 }
240 break;
241
242 default:
243 /* A type, a decl, or some kind of statement that we're not
244 interested in. Don't walk them. */
245 break;
246 }
247 }
248
249 /* Use the finally tree to determine if a jump from START to TARGET
250 would leave the try_finally node that START lives in. */
251
252 static bool
253 outside_finally_tree (tree start, tree target)
254 {
255 struct finally_tree_node n, *p;
256
257 do
258 {
259 n.child = start;
260 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
261 if (!p)
262 return true;
263 start = p->parent;
264 }
265 while (start != target);
266
267 return false;
268 }
269 \f
270 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
271 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
 272 The eh region creation is straightforward, but frobbing all the gotos
273 and such into shape isn't. */
274
275 /* State of the world while lowering. */
276
277 struct leh_state
278 {
279 /* What's "current" while constructing the eh region tree. These
280 correspond to variables of the same name in cfun->eh, which we
281 don't have easy access to. */
282 struct eh_region *cur_region;
283 struct eh_region *prev_try;
284
285 /* Processing of TRY_FINALLY requires a bit more state. This is
286 split out into a separate structure so that we don't have to
287 copy so much when processing other nodes. */
288 struct leh_tf_state *tf;
289 };
290
291 struct leh_tf_state
292 {
293 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
294 is the original TRY_FINALLY_EXPR. We need to retain this so that
295 outside_finally_tree can reliably reference the tree used in the
296 collect_finally_tree data structures. */
297 tree try_finally_expr;
298 tree *top_p;
299
300 /* The state outside this try_finally node. */
301 struct leh_state *outer;
302
303 /* The exception region created for it. */
304 struct eh_region *region;
305
 306 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
307 that are seen to escape this TRY_FINALLY_EXPR node. */
308 struct goto_queue_node {
309 tree stmt;
310 tree repl_stmt;
311 tree cont_stmt;
312 int index;
313 } *goto_queue;
314 size_t goto_queue_size;
315 size_t goto_queue_active;
316
 317 /* Pointer map to help in searching goto_queue when it is large. */
318 struct pointer_map_t *goto_queue_map;
319
320 /* The set of unique labels seen as entries in the goto queue. */
321 VEC(tree,heap) *dest_array;
322
323 /* A label to be added at the end of the completed transformed
324 sequence. It will be set if may_fallthru was true *at one time*,
325 though subsequent transformations may have cleared that flag. */
326 tree fallthru_label;
327
328 /* A label that has been registered with except.c to be the
329 landing pad for this try block. */
330 tree eh_label;
331
332 /* True if it is possible to fall out the bottom of the try block.
333 Cleared if the fallthru is converted to a goto. */
334 bool may_fallthru;
335
336 /* True if any entry in goto_queue is a RETURN_EXPR. */
337 bool may_return;
338
339 /* True if the finally block can receive an exception edge.
340 Cleared if the exception case is handled by code duplication. */
341 bool may_throw;
342 };
343
344 static void lower_eh_filter (struct leh_state *, tree *);
345 static void lower_eh_constructs_1 (struct leh_state *, tree *);
346
347 /* Search for STMT in the goto queue. Return the replacement,
348 or null if the statement isn't in the queue. */
349
350 #define LARGE_GOTO_QUEUE 20
351
352 static tree
353 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
354 {
355 unsigned int i;
356 void **slot;
357
358 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
359 {
360 for (i = 0; i < tf->goto_queue_active; i++)
361 if (tf->goto_queue[i].stmt == stmt)
362 return tf->goto_queue[i].repl_stmt;
363 return NULL;
364 }
365
366 /* If we have a large number of entries in the goto_queue, create a
367 pointer map and use that for searching. */
368
369 if (!tf->goto_queue_map)
370 {
371 tf->goto_queue_map = pointer_map_create ();
372 for (i = 0; i < tf->goto_queue_active; i++)
373 {
374 slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
375 gcc_assert (*slot == NULL);
376 *slot = (void *) &tf->goto_queue[i];
377 }
378 }
379
380 slot = pointer_map_contains (tf->goto_queue_map, stmt);
381 if (slot != NULL)
382 return (((struct goto_queue_node *) *slot)->repl_stmt);
383
384 return NULL;
385 }
386
387 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
388 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
389 then we can just splat it in, otherwise we add the new stmts immediately
390 after the COND_EXPR and redirect. */
391
392 static void
393 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
394 tree_stmt_iterator *tsi)
395 {
396 tree new, one, label;
397
398 new = find_goto_replacement (tf, *tp);
399 if (!new)
400 return;
401
402 one = expr_only (new);
403 if (one && TREE_CODE (one) == GOTO_EXPR)
404 {
405 *tp = one;
406 return;
407 }
408
409 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
410 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
411
412 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
413 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
414 }
415
416 /* The real work of replace_goto_queue. Returns with TSI updated to
417 point to the next statement. */
418
419 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
420
421 static void
422 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
423 {
424 switch (TREE_CODE (t))
425 {
426 case GOTO_EXPR:
427 case RETURN_EXPR:
428 t = find_goto_replacement (tf, t);
429 if (t)
430 {
431 tsi_link_before (tsi, t, TSI_SAME_STMT);
432 tsi_delink (tsi);
433 return;
434 }
435 break;
436
437 case COND_EXPR:
438 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
439 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
440 break;
441
442 case TRY_FINALLY_EXPR:
443 case TRY_CATCH_EXPR:
444 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
445 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
446 break;
447 case CATCH_EXPR:
448 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
449 break;
450 case EH_FILTER_EXPR:
451 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
452 break;
453
454 case STATEMENT_LIST:
455 gcc_unreachable ();
456
457 default:
458 /* These won't have gotos in them. */
459 break;
460 }
461
462 tsi_next (tsi);
463 }
464
465 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
466
467 static void
468 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
469 {
470 tree_stmt_iterator i = tsi_start (t);
471 while (!tsi_end_p (i))
472 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
473 }
474
475 /* Replace all goto queue members. */
476
477 static void
478 replace_goto_queue (struct leh_tf_state *tf)
479 {
480 if (tf->goto_queue_active == 0)
481 return;
482 replace_goto_queue_stmt_list (*tf->top_p, tf);
483 }
484
485 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
486 node, and if so record that fact in the goto queue associated with that
487 try_finally node. */
488
489 static void
490 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
491 {
492 struct leh_tf_state *tf = state->tf;
493 struct goto_queue_node *q;
494 size_t active, size;
495 int index;
496
497 if (!tf)
498 return;
499
500 switch (TREE_CODE (stmt))
501 {
502 case GOTO_EXPR:
503 {
504 tree lab = GOTO_DESTINATION (stmt);
505
506 /* Computed and non-local gotos do not get processed. Given
507 their nature we can neither tell whether we've escaped the
508 finally block nor redirect them if we knew. */
509 if (TREE_CODE (lab) != LABEL_DECL)
510 return;
511
512 /* No need to record gotos that don't leave the try block. */
513 if (! outside_finally_tree (lab, tf->try_finally_expr))
514 return;
515
516 if (! tf->dest_array)
517 {
518 tf->dest_array = VEC_alloc (tree, heap, 10);
519 VEC_quick_push (tree, tf->dest_array, lab);
520 index = 0;
521 }
522 else
523 {
524 int n = VEC_length (tree, tf->dest_array);
525 for (index = 0; index < n; ++index)
526 if (VEC_index (tree, tf->dest_array, index) == lab)
527 break;
528 if (index == n)
529 VEC_safe_push (tree, heap, tf->dest_array, lab);
530 }
531 }
532 break;
533
534 case RETURN_EXPR:
535 tf->may_return = true;
536 index = -1;
537 break;
538
539 default:
540 gcc_unreachable ();
541 }
542
543 gcc_assert (!tf->goto_queue_map);
544
545 active = tf->goto_queue_active;
546 size = tf->goto_queue_size;
547 if (active >= size)
548 {
549 size = (size ? size * 2 : 32);
550 tf->goto_queue_size = size;
551 tf->goto_queue
552 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
553 }
554
555 q = &tf->goto_queue[active];
556 tf->goto_queue_active = active + 1;
557
558 memset (q, 0, sizeof (*q));
559 q->stmt = stmt;
560 q->index = index;
561 }
562
563 #ifdef ENABLE_CHECKING
564 /* We do not process SWITCH_EXPRs for now. As long as the original source
565 was in fact structured, and we've not yet done jump threading, then none
566 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
567
568 static void
569 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
570 {
571 struct leh_tf_state *tf = state->tf;
572 size_t i, n;
573 tree vec;
574
575 if (!tf)
576 return;
577
578 vec = SWITCH_LABELS (switch_expr);
579 n = TREE_VEC_LENGTH (vec);
580
581 for (i = 0; i < n; ++i)
582 {
583 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
584 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
585 }
586 }
587 #else
588 #define verify_norecord_switch_expr(state, switch_expr)
589 #endif
590
591 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
592 whatever is needed to finish the return. If MOD is non-null, insert it
593 before the new branch. RETURN_VALUE_P is a cache containing a temporary
594 variable to be used in manipulating the value returned from the function. */
595
596 static void
597 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
598 tree *return_value_p)
599 {
600 tree ret_expr = TREE_OPERAND (q->stmt, 0);
601 tree x;
602
603 if (ret_expr)
604 {
605 /* The nasty part about redirecting the return value is that the
606 return value itself is to be computed before the FINALLY block
607 is executed. e.g.
608
609 int x;
610 int foo (void)
611 {
612 x = 0;
613 try {
614 return x;
615 } finally {
616 x++;
617 }
618 }
619
 620 should return 0, not 1. Arrange for this to happen by copying the
 621 computed return value into a local temporary. This also
622 allows us to redirect multiple return statements through the
623 same destination block; whether this is a net win or not really
624 depends, I guess, but it does make generation of the switch in
625 lower_try_finally_switch easier. */
626
627 switch (TREE_CODE (ret_expr))
628 {
629 case RESULT_DECL:
630 if (!*return_value_p)
631 *return_value_p = ret_expr;
632 else
633 gcc_assert (*return_value_p == ret_expr);
634 q->cont_stmt = q->stmt;
635 break;
636
637 case GIMPLE_MODIFY_STMT:
638 {
639 tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
640 tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
641
642 if (!*return_value_p)
643 {
644 if (aggregate_value_p (TREE_TYPE (result),
645 TREE_TYPE (current_function_decl)))
646 /* If this function returns in memory, copy the argument
647 into the return slot now. Otherwise, we might need to
648 worry about magic return semantics, so we need to use a
649 temporary to hold the value until we're actually ready
650 to return. */
651 new = result;
652 else
653 new = create_tmp_var (TREE_TYPE (old), "rettmp");
654 *return_value_p = new;
655 }
656 else
657 new = *return_value_p;
658
659 x = build_gimple_modify_stmt (new, old);
660 append_to_statement_list (x, &q->repl_stmt);
661
662 if (new == result)
663 x = result;
664 else
665 x = build_gimple_modify_stmt (result, new);
666 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
667 }
 668 break;
669 default:
670 gcc_unreachable ();
671 }
672 }
673 else
674 {
675 /* If we don't return a value, all return statements are the same. */
676 q->cont_stmt = q->stmt;
677 }
678
679 if (mod)
680 append_to_statement_list (mod, &q->repl_stmt);
681
682 x = build1 (GOTO_EXPR, void_type_node, finlab);
683 append_to_statement_list (x, &q->repl_stmt);
684 }
685
686 /* Similar, but easier, for GOTO_EXPR. */
687
688 static void
689 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
690 {
691 tree x;
692
693 q->cont_stmt = q->stmt;
694 if (mod)
695 append_to_statement_list (mod, &q->repl_stmt);
696
697 x = build1 (GOTO_EXPR, void_type_node, finlab);
698 append_to_statement_list (x, &q->repl_stmt);
699 }
700
701 /* We want to transform
702 try { body; } catch { stuff; }
703 to
704 body; goto over; lab: stuff; over:
705
706 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
707 should be placed before the second operand, or NULL. OVER is
708 an existing label that should be put at the exit, or NULL. */
709
710 static void
711 frob_into_branch_around (tree *tp, tree lab, tree over)
712 {
713 tree x, op1;
714
715 op1 = TREE_OPERAND (*tp, 1);
716 *tp = TREE_OPERAND (*tp, 0);
717
718 if (block_may_fallthru (*tp))
719 {
720 if (!over)
721 over = create_artificial_label ();
722 x = build1 (GOTO_EXPR, void_type_node, over);
723 append_to_statement_list (x, tp);
724 }
725
726 if (lab)
727 {
728 x = build1 (LABEL_EXPR, void_type_node, lab);
729 append_to_statement_list (x, tp);
730 }
731
732 append_to_statement_list (op1, tp);
733
734 if (over)
735 {
736 x = build1 (LABEL_EXPR, void_type_node, over);
737 append_to_statement_list (x, tp);
738 }
739 }
740
741 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
742 Make sure to record all new labels found. */
743
744 static tree
745 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
746 {
747 tree region = NULL;
748
749 t = unsave_expr_now (t);
750
751 if (outer_state->tf)
752 region = outer_state->tf->try_finally_expr;
753 collect_finally_tree (t, region);
754
755 return t;
756 }
757
758 /* A subroutine of lower_try_finally. Create a fallthru label for
759 the given try_finally state. The only tricky bit here is that
760 we have to make sure to record the label in our outer context. */
761
762 static tree
763 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
764 {
765 tree label = tf->fallthru_label;
766 if (!label)
767 {
768 label = create_artificial_label ();
769 tf->fallthru_label = label;
770 if (tf->outer->tf)
771 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
772 }
773 return label;
774 }
775
776 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
777 returns non-null, then the language requires that the exception path out
778 of a try_finally be treated specially. To wit: the code within the
779 finally block may not itself throw an exception. We have two choices here.
780 First we can duplicate the finally block and wrap it in a must_not_throw
781 region. Second, we can generate code like
782
783 try {
784 finally_block;
785 } catch {
786 if (fintmp == eh_edge)
787 protect_cleanup_actions;
788 }
789
790 where "fintmp" is the temporary used in the switch statement generation
791 alternative considered below. For the nonce, we always choose the first
792 option.
793
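For C++, for instance, lang_protect_cleanup_actions returns a call to
terminate (), so the first choice produces roughly

  try {
    finally_block;
  } catch {
    <must_not_throw> terminate ();
  }

i.e. an exception raised while running the cleanup ends the program
instead of propagating past it.
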
794 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
795
796 static void
797 honor_protect_cleanup_actions (struct leh_state *outer_state,
798 struct leh_state *this_state,
799 struct leh_tf_state *tf)
800 {
801 tree protect_cleanup_actions, finally, x;
802 tree_stmt_iterator i;
803 bool finally_may_fallthru;
804
805 /* First check for nothing to do. */
806 if (lang_protect_cleanup_actions)
807 protect_cleanup_actions = lang_protect_cleanup_actions ();
808 else
809 protect_cleanup_actions = NULL;
810
811 finally = TREE_OPERAND (*tf->top_p, 1);
812
813 /* If the EH case of the finally block can fall through, this may be a
814 structure of the form
815 try {
816 try {
817 throw ...;
818 } cleanup {
819 try {
820 throw ...;
821 } catch (...) {
822 }
823 }
824 } catch (...) {
825 yyy;
826 }
827 E.g. with an inline destructor with an embedded try block. In this
828 case we must save the runtime EH data around the nested exception.
829
830 This complication means that any time the previous runtime data might
831 be used (via fallthru from the finally) we handle the eh case here,
832 whether or not protect_cleanup_actions is active. */
833
834 finally_may_fallthru = block_may_fallthru (finally);
835 if (!finally_may_fallthru && !protect_cleanup_actions)
836 return;
837
838 /* Duplicate the FINALLY block. Only need to do this for try-finally,
839 and not for cleanups. */
840 if (this_state)
841 finally = lower_try_finally_dup_block (finally, outer_state);
842
843 /* Resume execution after the exception. Adding this now lets
844 lower_eh_filter not add unnecessary gotos, as it is clear that
845 we never fallthru from this copy of the finally block. */
846 if (finally_may_fallthru)
847 {
848 tree save_eptr, save_filt;
849
850 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
851 save_filt = create_tmp_var (integer_type_node, "save_filt");
852
853 i = tsi_start (finally);
854 x = build0 (EXC_PTR_EXPR, ptr_type_node);
855 x = build_gimple_modify_stmt (save_eptr, x);
856 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
857
858 x = build0 (FILTER_EXPR, integer_type_node);
859 x = build_gimple_modify_stmt (save_filt, x);
860 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
861
862 i = tsi_last (finally);
863 x = build0 (EXC_PTR_EXPR, ptr_type_node);
864 x = build_gimple_modify_stmt (x, save_eptr);
865 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
866
867 x = build0 (FILTER_EXPR, integer_type_node);
868 x = build_gimple_modify_stmt (x, save_filt);
869 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
870
871 x = build_resx (get_eh_region_number (tf->region));
872 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
873 }
874
875 /* Wrap the block with protect_cleanup_actions as the action. */
876 if (protect_cleanup_actions)
877 {
878 x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
879 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
880 EH_FILTER_MUST_NOT_THROW (x) = 1;
881 finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
882 lower_eh_filter (outer_state, &finally);
883 }
884 else
885 lower_eh_constructs_1 (outer_state, &finally);
886
887 /* Hook this up to the end of the existing try block. If we
888 previously fell through the end, we'll have to branch around.
889 This means adding a new goto, and adding it to the queue. */
890
891 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
892
893 if (tf->may_fallthru)
894 {
895 x = lower_try_finally_fallthru_label (tf);
896 x = build1 (GOTO_EXPR, void_type_node, x);
897 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
898
899 if (this_state)
900 maybe_record_in_goto_queue (this_state, x);
901
902 tf->may_fallthru = false;
903 }
904
905 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
906 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
907 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
908
909 /* Having now been handled, EH isn't to be considered with
910 the rest of the outgoing edges. */
911 tf->may_throw = false;
912 }
913
914 /* A subroutine of lower_try_finally. We have determined that there is
915 no fallthru edge out of the finally block. This means that there is
916 no outgoing edge corresponding to any incoming edge. Restructure the
917 try_finally node for this special case. */
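/* Sketch of the result: since nothing can follow the finally block, it is
   emitted exactly once behind a label and every escaping edge is pointed
   at that label:

     <try body, with its gotos/returns redirected to lab>
     lab:                (this is the eh_label when the region may throw)
     <finally>
*/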
918
919 static void
920 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
921 {
922 tree x, finally, lab, return_val;
923 struct goto_queue_node *q, *qe;
924
925 if (tf->may_throw)
926 lab = tf->eh_label;
927 else
928 lab = create_artificial_label ();
929
930 finally = TREE_OPERAND (*tf->top_p, 1);
931 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
932
933 x = build1 (LABEL_EXPR, void_type_node, lab);
934 append_to_statement_list (x, tf->top_p);
935
936 return_val = NULL;
937 q = tf->goto_queue;
938 qe = q + tf->goto_queue_active;
939 for (; q < qe; ++q)
940 if (q->index < 0)
941 do_return_redirection (q, lab, NULL, &return_val);
942 else
943 do_goto_redirection (q, lab, NULL);
944
945 replace_goto_queue (tf);
946
947 lower_eh_constructs_1 (state, &finally);
948 append_to_statement_list (finally, tf->top_p);
949 }
950
951 /* A subroutine of lower_try_finally. We have determined that there is
952 exactly one destination of the finally block. Restructure the
953 try_finally node for this special case. */
954
955 static void
956 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
957 {
958 struct goto_queue_node *q, *qe;
959 tree x, finally, finally_label;
960
961 finally = TREE_OPERAND (*tf->top_p, 1);
962 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
963
964 lower_eh_constructs_1 (state, &finally);
965
966 if (tf->may_throw)
967 {
968 /* Only reachable via the exception edge. Add the given label to
969 the head of the FINALLY block. Append a RESX at the end. */
970
971 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
972 append_to_statement_list (x, tf->top_p);
973
974 append_to_statement_list (finally, tf->top_p);
975
976 x = build_resx (get_eh_region_number (tf->region));
977
978 append_to_statement_list (x, tf->top_p);
979
980 return;
981 }
982
983 if (tf->may_fallthru)
984 {
985 /* Only reachable via the fallthru edge. Do nothing but let
986 the two blocks run together; we'll fall out the bottom. */
987 append_to_statement_list (finally, tf->top_p);
988 return;
989 }
990
991 finally_label = create_artificial_label ();
992 x = build1 (LABEL_EXPR, void_type_node, finally_label);
993 append_to_statement_list (x, tf->top_p);
994
995 append_to_statement_list (finally, tf->top_p);
996
997 q = tf->goto_queue;
998 qe = q + tf->goto_queue_active;
999
1000 if (tf->may_return)
1001 {
1002 /* Reachable by return expressions only. Redirect them. */
1003 tree return_val = NULL;
1004 for (; q < qe; ++q)
1005 do_return_redirection (q, finally_label, NULL, &return_val);
1006 replace_goto_queue (tf);
1007 }
1008 else
1009 {
1010 /* Reachable by goto expressions only. Redirect them. */
1011 for (; q < qe; ++q)
1012 do_goto_redirection (q, finally_label, NULL);
1013 replace_goto_queue (tf);
1014
1015 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1016 {
1017 /* Reachable by goto to fallthru label only. Redirect it
1018 to the new label (already created, sadly), and do not
1019 emit the final branch out, or the fallthru label. */
1020 tf->fallthru_label = NULL;
1021 return;
1022 }
1023 }
1024
 1025 /* Reset the locus of the goto since we're moving the
 1026 goto to a different block, which might be on a different line. */
1027 SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
1028 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1029 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1030 }
1031
1032 /* A subroutine of lower_try_finally. There are multiple edges incoming
1033 and outgoing from the finally block. Implement this by duplicating the
1034 finally block for every destination. */
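/* Rough shape of the result: one copy of FINALLY per destination.  E.g.
   with a fallthru edge and a single goto to L, the block becomes

     <try body, with its exits redirected to L1 / L2>
     L1:  <finally copy>  goto <fallthru label>;
     L2:  <finally copy>  goto L;

   and, if the region may throw, an eh_label'ed copy that ends in a RESX.  */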
1035
1036 static void
1037 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1038 {
1039 tree finally, new_stmt;
1040 tree x;
1041
1042 finally = TREE_OPERAND (*tf->top_p, 1);
1043 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1044
1045 new_stmt = NULL_TREE;
1046
1047 if (tf->may_fallthru)
1048 {
1049 x = lower_try_finally_dup_block (finally, state);
1050 lower_eh_constructs_1 (state, &x);
1051 append_to_statement_list (x, &new_stmt);
1052
1053 x = lower_try_finally_fallthru_label (tf);
1054 x = build1 (GOTO_EXPR, void_type_node, x);
1055 append_to_statement_list (x, &new_stmt);
1056 }
1057
1058 if (tf->may_throw)
1059 {
1060 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1061 append_to_statement_list (x, &new_stmt);
1062
1063 x = lower_try_finally_dup_block (finally, state);
1064 lower_eh_constructs_1 (state, &x);
1065 append_to_statement_list (x, &new_stmt);
1066
1067 x = build_resx (get_eh_region_number (tf->region));
1068 append_to_statement_list (x, &new_stmt);
1069 }
1070
1071 if (tf->goto_queue)
1072 {
1073 struct goto_queue_node *q, *qe;
1074 tree return_val = NULL;
1075 int return_index, index;
1076 struct labels_s
1077 {
1078 struct goto_queue_node *q;
1079 tree label;
1080 } *labels;
1081
1082 return_index = VEC_length (tree, tf->dest_array);
1083 labels = XCNEWVEC (struct labels_s, return_index + 1);
1084
1085 q = tf->goto_queue;
1086 qe = q + tf->goto_queue_active;
1087 for (; q < qe; q++)
1088 {
1089 index = q->index < 0 ? return_index : q->index;
1090
1091 if (!labels[index].q)
1092 labels[index].q = q;
1093 }
1094
1095 for (index = 0; index < return_index + 1; index++)
1096 {
1097 tree lab;
1098
1099 q = labels[index].q;
1100 if (! q)
1101 continue;
1102
1103 lab = labels[index].label = create_artificial_label ();
1104
1105 if (index == return_index)
1106 do_return_redirection (q, lab, NULL, &return_val);
1107 else
1108 do_goto_redirection (q, lab, NULL);
1109
1110 x = build1 (LABEL_EXPR, void_type_node, lab);
1111 append_to_statement_list (x, &new_stmt);
1112
1113 x = lower_try_finally_dup_block (finally, state);
1114 lower_eh_constructs_1 (state, &x);
1115 append_to_statement_list (x, &new_stmt);
1116
1117 append_to_statement_list (q->cont_stmt, &new_stmt);
1118 maybe_record_in_goto_queue (state, q->cont_stmt);
1119 }
1120
1121 for (q = tf->goto_queue; q < qe; q++)
1122 {
1123 tree lab;
1124
1125 index = q->index < 0 ? return_index : q->index;
1126
1127 if (labels[index].q == q)
1128 continue;
1129
1130 lab = labels[index].label;
1131
1132 if (index == return_index)
1133 do_return_redirection (q, lab, NULL, &return_val);
1134 else
1135 do_goto_redirection (q, lab, NULL);
1136 }
1137
1138 replace_goto_queue (tf);
1139 free (labels);
1140 }
1141
1142 /* Need to link new stmts after running replace_goto_queue due
1143 to not wanting to process the same goto stmts twice. */
1144 append_to_statement_list (new_stmt, tf->top_p);
1145 }
1146
1147 /* A subroutine of lower_try_finally. There are multiple edges incoming
1148 and outgoing from the finally block. Implement this by instrumenting
1149 each incoming edge and creating a switch statement at the end of the
1150 finally block that branches to the appropriate destination. */
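/* Rough shape of the result: each incoming edge stores a small integer in
   a temporary and jumps to the single FINALLY copy; a switch placed after
   the copy fans back out to the original destinations:

     <try body; each exit does  finally_tmp = K; goto finally_label;>
     finally_label:
     <finally>
     switch (finally_tmp)
       {
       case <fallthru index>:  goto <fallthru label>;
       case <eh index>:        <RESX>;
       case <goto index K>:    goto <original target>;
       ...
       }

   The last case is later turned into the default label, as GIMPLE
   requires one.  */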
1151
1152 static void
1153 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1154 {
1155 struct goto_queue_node *q, *qe;
1156 tree return_val = NULL;
1157 tree finally, finally_tmp, finally_label;
1158 int return_index, eh_index, fallthru_index;
1159 int nlabels, ndests, j, last_case_index;
1160 tree case_label_vec, switch_stmt, last_case, switch_body;
1161 tree x;
1162
1163 /* Mash the TRY block to the head of the chain. */
1164 finally = TREE_OPERAND (*tf->top_p, 1);
1165 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1166
1167 /* Lower the finally block itself. */
1168 lower_eh_constructs_1 (state, &finally);
1169
1170 /* Prepare for switch statement generation. */
1171 nlabels = VEC_length (tree, tf->dest_array);
1172 return_index = nlabels;
1173 eh_index = return_index + tf->may_return;
1174 fallthru_index = eh_index + tf->may_throw;
1175 ndests = fallthru_index + tf->may_fallthru;
1176
1177 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1178 finally_label = create_artificial_label ();
1179
1180 case_label_vec = make_tree_vec (ndests);
1181 switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
1182 NULL_TREE, case_label_vec);
1183 switch_body = NULL;
1184 last_case = NULL;
1185 last_case_index = 0;
1186
1187 /* Begin inserting code for getting to the finally block. Things
 1188 are done in this order to correspond to the sequence in which the code
 1189 is laid out. */
1190
1191 if (tf->may_fallthru)
1192 {
1193 x = build_gimple_modify_stmt (finally_tmp,
1194 build_int_cst (integer_type_node,
1195 fallthru_index));
1196 append_to_statement_list (x, tf->top_p);
1197
1198 if (tf->may_throw)
1199 {
1200 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1201 append_to_statement_list (x, tf->top_p);
1202 }
1203
1204
1205 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1206 build_int_cst (NULL_TREE, fallthru_index), NULL,
1207 create_artificial_label ());
1208 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1209 last_case_index++;
1210
1211 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1212 append_to_statement_list (x, &switch_body);
1213
1214 x = lower_try_finally_fallthru_label (tf);
1215 x = build1 (GOTO_EXPR, void_type_node, x);
1216 append_to_statement_list (x, &switch_body);
1217 }
1218
1219 if (tf->may_throw)
1220 {
1221 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1222 append_to_statement_list (x, tf->top_p);
1223
1224 x = build_gimple_modify_stmt (finally_tmp,
1225 build_int_cst (integer_type_node,
1226 eh_index));
1227 append_to_statement_list (x, tf->top_p);
1228
1229 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1230 build_int_cst (NULL_TREE, eh_index), NULL,
1231 create_artificial_label ());
1232 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1233 last_case_index++;
1234
1235 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1236 append_to_statement_list (x, &switch_body);
1237 x = build_resx (get_eh_region_number (tf->region));
1238 append_to_statement_list (x, &switch_body);
1239 }
1240
1241 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1242 append_to_statement_list (x, tf->top_p);
1243
1244 append_to_statement_list (finally, tf->top_p);
1245
1246 /* Redirect each incoming goto edge. */
1247 q = tf->goto_queue;
1248 qe = q + tf->goto_queue_active;
1249 j = last_case_index + tf->may_return;
1250 for (; q < qe; ++q)
1251 {
1252 tree mod;
1253 int switch_id, case_index;
1254
1255 if (q->index < 0)
1256 {
1257 mod = build_gimple_modify_stmt (finally_tmp,
1258 build_int_cst (integer_type_node,
1259 return_index));
1260 do_return_redirection (q, finally_label, mod, &return_val);
1261 switch_id = return_index;
1262 }
1263 else
1264 {
1265 mod = build_gimple_modify_stmt (finally_tmp,
1266 build_int_cst (integer_type_node,
1267 q->index));
1268 do_goto_redirection (q, finally_label, mod);
1269 switch_id = q->index;
1270 }
1271
1272 case_index = j + q->index;
1273 if (!TREE_VEC_ELT (case_label_vec, case_index))
1274 TREE_VEC_ELT (case_label_vec, case_index)
1275 = build3 (CASE_LABEL_EXPR, void_type_node,
1276 build_int_cst (NULL_TREE, switch_id), NULL,
1277 /* We store the cont_stmt in the
1278 CASE_LABEL, so that we can recover it
1279 in the loop below. We don't create
1280 the new label while walking the
1281 goto_queue because pointers don't
1282 offer a stable order. */
1283 q->cont_stmt);
1284 }
1285 for (j = last_case_index; j < last_case_index + nlabels; j++)
1286 {
1287 tree label;
1288 tree cont_stmt;
1289
1290 last_case = TREE_VEC_ELT (case_label_vec, j);
1291
1292 gcc_assert (last_case);
1293
1294 cont_stmt = CASE_LABEL (last_case);
1295
1296 label = create_artificial_label ();
1297 CASE_LABEL (last_case) = label;
1298
1299 x = build1 (LABEL_EXPR, void_type_node, label);
1300 append_to_statement_list (x, &switch_body);
1301 append_to_statement_list (cont_stmt, &switch_body);
1302 maybe_record_in_goto_queue (state, cont_stmt);
1303 }
1304 replace_goto_queue (tf);
1305
1306 /* Make sure that the last case is the default label, as one is required.
1307 Then sort the labels, which is also required in GIMPLE. */
1308 CASE_LOW (last_case) = NULL;
1309 sort_case_labels (case_label_vec);
1310
1311 /* Need to link switch_stmt after running replace_goto_queue due
1312 to not wanting to process the same goto stmts twice. */
1313 append_to_statement_list (switch_stmt, tf->top_p);
1314 append_to_statement_list (switch_body, tf->top_p);
1315 }
1316
1317 /* Decide whether or not we are going to duplicate the finally block.
1318 There are several considerations.
1319
1320 First, if this is Java, then the finally block contains code
1321 written by the user. It has line numbers associated with it,
1322 so duplicating the block means it's difficult to set a breakpoint.
1323 Since controlling code generation via -g is verboten, we simply
1324 never duplicate code without optimization.
1325
1326 Second, we'd like to prevent egregious code growth. One way to
1327 do this is to estimate the size of the finally block, multiply
1328 that by the number of copies we'd need to make, and compare against
1329 the estimate of the size of the switch machinery we'd have to add. */
1330
1331 static bool
1332 decide_copy_try_finally (int ndests, tree finally)
1333 {
1334 int f_estimate, sw_estimate;
1335
1336 if (!optimize)
1337 return false;
1338
1339 /* Finally estimate N times, plus N gotos. */
1340 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1341 f_estimate = (f_estimate + 1) * ndests;
1342
1343 /* Switch statement (cost 10), N variable assignments, N gotos. */
1344 sw_estimate = 10 + 2 * ndests;
1345
1346 /* Optimize for size clearly wants our best guess. */
1347 if (optimize_size)
1348 return f_estimate < sw_estimate;
1349
1350 /* ??? These numbers are completely made up so far. */
1351 if (optimize > 1)
1352 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1353 else
1354 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1355 }
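/* Worked example of the estimates above: a FINALLY block of roughly 4 insns
   with 3 destinations gives f_estimate = (4 + 1) * 3 = 15 against
   sw_estimate = 10 + 2 * 3 = 16, so even -Os copies it; a 30-insn cleanup
   with the same 3 destinations (f_estimate = 93) gets the switch under -Os
   but is still copied at -O2 and above, since 93 < 100.  */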
1356
1357 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1358 to a sequence of labels and blocks, plus the exception region trees
1359 that record all the magic. This is complicated by the need to
1360 arrange for the FINALLY block to be executed on all exits. */
1361
1362 static void
1363 lower_try_finally (struct leh_state *state, tree *tp)
1364 {
1365 struct leh_tf_state this_tf;
1366 struct leh_state this_state;
1367 int ndests;
1368
1369 /* Process the try block. */
1370
1371 memset (&this_tf, 0, sizeof (this_tf));
1372 this_tf.try_finally_expr = *tp;
1373 this_tf.top_p = tp;
1374 this_tf.outer = state;
1375 if (using_eh_for_cleanups_p)
1376 this_tf.region
1377 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1378 else
1379 this_tf.region = NULL;
1380
1381 this_state.cur_region = this_tf.region;
1382 this_state.prev_try = state->prev_try;
1383 this_state.tf = &this_tf;
1384
1385 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1386
1387 /* Determine if the try block is escaped through the bottom. */
1388 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1389
1390 /* Determine if any exceptions are possible within the try block. */
1391 if (using_eh_for_cleanups_p)
1392 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1393 if (this_tf.may_throw)
1394 {
1395 this_tf.eh_label = create_artificial_label ();
1396 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1397 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1398 }
1399
1400 /* Determine how many edges (still) reach the finally block. Or rather,
1401 how many destinations are reached by the finally block. Use this to
1402 determine how we process the finally block itself. */
1403
1404 ndests = VEC_length (tree, this_tf.dest_array);
1405 ndests += this_tf.may_fallthru;
1406 ndests += this_tf.may_return;
1407 ndests += this_tf.may_throw;
1408
1409 /* If the FINALLY block is not reachable, dike it out. */
1410 if (ndests == 0)
1411 *tp = TREE_OPERAND (*tp, 0);
1412
1413 /* If the finally block doesn't fall through, then any destination
1414 we might try to impose there isn't reached either. There may be
1415 some minor amount of cleanup and redirection still needed. */
1416 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1417 lower_try_finally_nofallthru (state, &this_tf);
1418
1419 /* We can easily special-case redirection to a single destination. */
1420 else if (ndests == 1)
1421 lower_try_finally_onedest (state, &this_tf);
1422
1423 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1424 lower_try_finally_copy (state, &this_tf);
1425 else
1426 lower_try_finally_switch (state, &this_tf);
1427
1428 /* If someone requested we add a label at the end of the transformed
1429 block, do so. */
1430 if (this_tf.fallthru_label)
1431 {
1432 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1433 append_to_statement_list (x, tp);
1434 }
1435
1436 VEC_free (tree, heap, this_tf.dest_array);
1437 if (this_tf.goto_queue)
1438 free (this_tf.goto_queue);
1439 if (this_tf.goto_queue_map)
1440 pointer_map_destroy (this_tf.goto_queue_map);
1441 }
1442
1443 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1444 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1445 exception region trees that record all the magic. */
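/* Rough shape of the result for two handlers:

     <try body>
     goto over;
     <eh_label 1>:  <handler 1 body>  goto over;
     <eh_label 2>:  <handler 2 body>  goto over;
     over:

   with one catch region per handler, labelled by its eh_label, all
   hanging off the try region.  */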
1446
1447 static void
1448 lower_catch (struct leh_state *state, tree *tp)
1449 {
1450 struct eh_region *try_region;
1451 struct leh_state this_state;
1452 tree_stmt_iterator i;
1453 tree out_label;
1454
1455 try_region = gen_eh_region_try (state->cur_region);
1456 this_state.cur_region = try_region;
1457 this_state.prev_try = try_region;
1458 this_state.tf = state->tf;
1459
1460 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1461
1462 if (!get_eh_region_may_contain_throw (try_region))
1463 {
1464 *tp = TREE_OPERAND (*tp, 0);
1465 return;
1466 }
1467
1468 out_label = NULL;
1469 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1470 {
1471 struct eh_region *catch_region;
1472 tree catch, x, eh_label;
1473
1474 catch = tsi_stmt (i);
1475 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1476
1477 this_state.cur_region = catch_region;
1478 this_state.prev_try = state->prev_try;
1479 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1480
1481 eh_label = create_artificial_label ();
1482 set_eh_region_tree_label (catch_region, eh_label);
1483
1484 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1485 tsi_link_before (&i, x, TSI_SAME_STMT);
1486
1487 if (block_may_fallthru (CATCH_BODY (catch)))
1488 {
1489 if (!out_label)
1490 out_label = create_artificial_label ();
1491
1492 x = build1 (GOTO_EXPR, void_type_node, out_label);
1493 append_to_statement_list (x, &CATCH_BODY (catch));
1494 }
1495
1496 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1497 tsi_delink (&i);
1498 }
1499
1500 frob_into_branch_around (tp, NULL, out_label);
1501 }
1502
1503 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1504 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1505 region trees that record all the magic. */
1506
1507 static void
1508 lower_eh_filter (struct leh_state *state, tree *tp)
1509 {
1510 struct leh_state this_state;
1511 struct eh_region *this_region;
1512 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1513 tree eh_label;
1514
1515 if (EH_FILTER_MUST_NOT_THROW (inner))
1516 this_region = gen_eh_region_must_not_throw (state->cur_region);
1517 else
1518 this_region = gen_eh_region_allowed (state->cur_region,
1519 EH_FILTER_TYPES (inner));
1520 this_state = *state;
1521 this_state.cur_region = this_region;
 1522 /* For a must-not-throw region, any cleanup regions inside it
 1523 can't reach outer catch regions. */
1524 if (EH_FILTER_MUST_NOT_THROW (inner))
1525 this_state.prev_try = NULL;
1526
1527 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1528
1529 if (!get_eh_region_may_contain_throw (this_region))
1530 {
1531 *tp = TREE_OPERAND (*tp, 0);
1532 return;
1533 }
1534
1535 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1536 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1537
1538 eh_label = create_artificial_label ();
1539 set_eh_region_tree_label (this_region, eh_label);
1540
1541 frob_into_branch_around (tp, eh_label, NULL);
1542 }
1543
1544 /* Implement a cleanup expression. This is similar to try-finally,
1545 except that we only execute the cleanup block for exception edges. */
1546
1547 static void
1548 lower_cleanup (struct leh_state *state, tree *tp)
1549 {
1550 struct leh_state this_state;
1551 struct eh_region *this_region;
1552 struct leh_tf_state fake_tf;
1553
1554 /* If not using eh, then exception-only cleanups are no-ops. */
1555 if (!flag_exceptions)
1556 {
1557 *tp = TREE_OPERAND (*tp, 0);
1558 lower_eh_constructs_1 (state, tp);
1559 return;
1560 }
1561
1562 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1563 this_state = *state;
1564 this_state.cur_region = this_region;
1565
1566 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1567
1568 if (!get_eh_region_may_contain_throw (this_region))
1569 {
1570 *tp = TREE_OPERAND (*tp, 0);
1571 return;
1572 }
1573
1574 /* Build enough of a try-finally state so that we can reuse
1575 honor_protect_cleanup_actions. */
1576 memset (&fake_tf, 0, sizeof (fake_tf));
1577 fake_tf.top_p = tp;
1578 fake_tf.outer = state;
1579 fake_tf.region = this_region;
1580 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1581 fake_tf.may_throw = true;
1582
1583 fake_tf.eh_label = create_artificial_label ();
1584 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1585
1586 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1587
1588 if (fake_tf.may_throw)
1589 {
1590 /* In this case honor_protect_cleanup_actions had nothing to do,
1591 and we should process this normally. */
1592 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1593 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1594 }
1595 else
1596 {
1597 /* In this case honor_protect_cleanup_actions did nearly all of
1598 the work. All we have left is to append the fallthru_label. */
1599
1600 *tp = TREE_OPERAND (*tp, 0);
1601 if (fake_tf.fallthru_label)
1602 {
1603 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1604 append_to_statement_list (x, tp);
1605 }
1606 }
1607 }
1608
1609 /* Main loop for lowering eh constructs. */
1610
1611 static void
1612 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1613 {
1614 tree_stmt_iterator i;
1615 tree t = *tp;
1616
1617 switch (TREE_CODE (t))
1618 {
1619 case COND_EXPR:
1620 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1621 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1622 break;
1623
1624 case CALL_EXPR:
1625 /* Look for things that can throw exceptions, and record them. */
1626 if (state->cur_region && tree_could_throw_p (t))
1627 {
1628 record_stmt_eh_region (state->cur_region, t);
1629 note_eh_region_may_contain_throw (state->cur_region);
1630 }
1631 break;
1632
1633 case GIMPLE_MODIFY_STMT:
1634 /* Look for things that can throw exceptions, and record them. */
1635 if (state->cur_region && tree_could_throw_p (t))
1636 {
1637 record_stmt_eh_region (state->cur_region, t);
1638 note_eh_region_may_contain_throw (state->cur_region);
1639 }
1640 break;
1641
1642 case GOTO_EXPR:
1643 case RETURN_EXPR:
1644 maybe_record_in_goto_queue (state, t);
1645 break;
1646 case SWITCH_EXPR:
1647 verify_norecord_switch_expr (state, t);
1648 break;
1649
1650 case TRY_FINALLY_EXPR:
1651 lower_try_finally (state, tp);
1652 break;
1653
1654 case TRY_CATCH_EXPR:
1655 i = tsi_start (TREE_OPERAND (t, 1));
1656 switch (TREE_CODE (tsi_stmt (i)))
1657 {
1658 case CATCH_EXPR:
1659 lower_catch (state, tp);
1660 break;
1661 case EH_FILTER_EXPR:
1662 lower_eh_filter (state, tp);
1663 break;
1664 default:
1665 lower_cleanup (state, tp);
1666 break;
1667 }
1668 break;
1669
1670 case STATEMENT_LIST:
1671 for (i = tsi_start (t); !tsi_end_p (i); )
1672 {
1673 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
1674 t = tsi_stmt (i);
1675 if (TREE_CODE (t) == STATEMENT_LIST)
1676 {
1677 tsi_link_before (&i, t, TSI_SAME_STMT);
1678 tsi_delink (&i);
1679 }
1680 else
1681 tsi_next (&i);
1682 }
1683 break;
1684
1685 default:
1686 /* A type, a decl, or some kind of statement that we're not
1687 interested in. Don't walk them. */
1688 break;
1689 }
1690 }
1691
1692 static unsigned int
1693 lower_eh_constructs (void)
1694 {
1695 struct leh_state null_state;
1696 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1697
1698 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1699
1700 collect_finally_tree (*tp, NULL);
1701
1702 memset (&null_state, 0, sizeof (null_state));
1703 lower_eh_constructs_1 (&null_state, tp);
1704
1705 htab_delete (finally_tree);
1706
1707 collect_eh_region_array ();
1708 return 0;
1709 }
1710
1711 struct tree_opt_pass pass_lower_eh =
1712 {
1713 "eh", /* name */
1714 NULL, /* gate */
1715 lower_eh_constructs, /* execute */
1716 NULL, /* sub */
1717 NULL, /* next */
1718 0, /* static_pass_number */
1719 TV_TREE_EH, /* tv_id */
1720 PROP_gimple_lcf, /* properties_required */
1721 PROP_gimple_leh, /* properties_provided */
1722 0, /* properties_destroyed */
1723 0, /* todo_flags_start */
1724 TODO_dump_func, /* todo_flags_finish */
1725 0 /* letter */
1726 };
1727
1728 \f
1729 /* Construct EH edges for STMT. */
1730
1731 static void
1732 make_eh_edge (struct eh_region *region, void *data)
1733 {
1734 tree stmt, lab;
1735 basic_block src, dst;
1736
1737 stmt = (tree) data;
1738 lab = get_eh_region_tree_label (region);
1739
1740 src = bb_for_stmt (stmt);
1741 dst = label_to_block (lab);
1742
1743 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1744 }
1745
1746 void
1747 make_eh_edges (tree stmt)
1748 {
1749 int region_nr;
1750 bool is_resx;
1751
1752 if (TREE_CODE (stmt) == RESX_EXPR)
1753 {
1754 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1755 is_resx = true;
1756 }
1757 else
1758 {
1759 region_nr = lookup_stmt_eh_region (stmt);
1760 if (region_nr < 0)
1761 return;
1762 is_resx = false;
1763 }
1764
1765 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1766 }
1767
1768 static bool mark_eh_edge_found_error;
1769
 1770 /* Mark the edge that make_eh_edge would create for the given region by
 1771 setting its aux field, and output an error if something goes wrong. */
1772 static void
1773 mark_eh_edge (struct eh_region *region, void *data)
1774 {
1775 tree stmt, lab;
1776 basic_block src, dst;
1777 edge e;
1778
1779 stmt = (tree) data;
1780 lab = get_eh_region_tree_label (region);
1781
1782 src = bb_for_stmt (stmt);
1783 dst = label_to_block (lab);
1784
1785 e = find_edge (src, dst);
1786 if (!e)
1787 {
1788 error ("EH edge %i->%i is missing", src->index, dst->index);
1789 mark_eh_edge_found_error = true;
1790 }
1791 else if (!(e->flags & EDGE_EH))
1792 {
1793 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1794 mark_eh_edge_found_error = true;
1795 }
1796 else if (e->aux)
1797 {
 1798 /* ??? This might not be a mistake. */
1799 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1800 mark_eh_edge_found_error = true;
1801 }
1802 else
1803 e->aux = (void *)1;
1804 }
1805
 1806 /* Verify that the BB containing STMT as its last statement has precisely
 1807 the edges that make_eh_edges would create. */
1808 bool
1809 verify_eh_edges (tree stmt)
1810 {
1811 int region_nr;
1812 bool is_resx;
1813 basic_block bb = bb_for_stmt (stmt);
1814 edge_iterator ei;
1815 edge e;
1816
1817 FOR_EACH_EDGE (e, ei, bb->succs)
1818 gcc_assert (!e->aux);
1819 mark_eh_edge_found_error = false;
1820 if (TREE_CODE (stmt) == RESX_EXPR)
1821 {
1822 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1823 is_resx = true;
1824 }
1825 else
1826 {
1827 region_nr = lookup_stmt_eh_region (stmt);
1828 if (region_nr < 0)
1829 {
1830 FOR_EACH_EDGE (e, ei, bb->succs)
1831 if (e->flags & EDGE_EH)
1832 {
1833 error ("BB %i can not throw but has EH edges", bb->index);
1834 return true;
1835 }
1836 return false;
1837 }
1838 if (!tree_could_throw_p (stmt))
1839 {
1840 error ("BB %i last statement has incorrectly set region", bb->index);
1841 return true;
1842 }
1843 is_resx = false;
1844 }
1845
1846 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1847 FOR_EACH_EDGE (e, ei, bb->succs)
1848 {
1849 if ((e->flags & EDGE_EH) && !e->aux)
1850 {
1851 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1852 mark_eh_edge_found_error = true;
1853 return true;
1854 }
1855 e->aux = NULL;
1856 }
1857 return mark_eh_edge_found_error;
1858 }
1859
1860 \f
1861 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1862 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1863 This routine expects only GIMPLE lhs or rhs input. */
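/* A few examples of expressions this considers trapping: a dereference
   such as *p (unless TREE_THIS_NOTRAP is set on it), an array reference
   a[i] whose index cannot be shown to be in bounds, a division whose
   divisor is not a nonzero constant, and floating-point arithmetic when
   trapping math is enabled.  */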
1864
1865 bool
1866 tree_could_trap_p (tree expr)
1867 {
1868 enum tree_code code = TREE_CODE (expr);
1869 bool honor_nans = false;
1870 bool honor_snans = false;
1871 bool fp_operation = false;
1872 bool honor_trapv = false;
1873 tree t, base;
1874
1875 if (TREE_CODE_CLASS (code) == tcc_comparison
1876 || TREE_CODE_CLASS (code) == tcc_unary
1877 || TREE_CODE_CLASS (code) == tcc_binary)
1878 {
1879 t = TREE_TYPE (expr);
1880 fp_operation = FLOAT_TYPE_P (t);
1881 if (fp_operation)
1882 {
1883 honor_nans = flag_trapping_math && !flag_finite_math_only;
1884 honor_snans = flag_signaling_nans != 0;
1885 }
1886 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
1887 honor_trapv = true;
1888 }
1889
1890 restart:
1891 switch (code)
1892 {
1893 case TARGET_MEM_REF:
1894 /* For TARGET_MEM_REFs use the information based on the original
1895 reference. */
1896 expr = TMR_ORIGINAL (expr);
1897 code = TREE_CODE (expr);
1898 goto restart;
1899
1900 case COMPONENT_REF:
1901 case REALPART_EXPR:
1902 case IMAGPART_EXPR:
1903 case BIT_FIELD_REF:
1904 case VIEW_CONVERT_EXPR:
1905 case WITH_SIZE_EXPR:
1906 expr = TREE_OPERAND (expr, 0);
1907 code = TREE_CODE (expr);
1908 goto restart;
1909
1910 case ARRAY_RANGE_REF:
1911 base = TREE_OPERAND (expr, 0);
1912 if (tree_could_trap_p (base))
1913 return true;
1914
1915 if (TREE_THIS_NOTRAP (expr))
1916 return false;
1917
1918 return !range_in_array_bounds_p (expr);
1919
1920 case ARRAY_REF:
1921 base = TREE_OPERAND (expr, 0);
1922 if (tree_could_trap_p (base))
1923 return true;
1924
1925 if (TREE_THIS_NOTRAP (expr))
1926 return false;
1927
1928 return !in_array_bounds_p (expr);
1929
1930 case INDIRECT_REF:
1931 case ALIGN_INDIRECT_REF:
1932 case MISALIGNED_INDIRECT_REF:
1933 return !TREE_THIS_NOTRAP (expr);
1934
1935 case ASM_EXPR:
1936 return TREE_THIS_VOLATILE (expr);
1937
1938 case TRUNC_DIV_EXPR:
1939 case CEIL_DIV_EXPR:
1940 case FLOOR_DIV_EXPR:
1941 case ROUND_DIV_EXPR:
1942 case EXACT_DIV_EXPR:
1943 case CEIL_MOD_EXPR:
1944 case FLOOR_MOD_EXPR:
1945 case ROUND_MOD_EXPR:
1946 case TRUNC_MOD_EXPR:
1947 case RDIV_EXPR:
1948 if (honor_snans || honor_trapv)
1949 return true;
1950 if (fp_operation)
1951 return flag_trapping_math;
1952 t = TREE_OPERAND (expr, 1);
1953 if (!TREE_CONSTANT (t) || integer_zerop (t))
1954 return true;
1955 return false;
1956
1957 case LT_EXPR:
1958 case LE_EXPR:
1959 case GT_EXPR:
1960 case GE_EXPR:
1961 case LTGT_EXPR:
1962 /* Some floating point comparisons may trap. */
1963 return honor_nans;
1964
1965 case EQ_EXPR:
1966 case NE_EXPR:
1967 case UNORDERED_EXPR:
1968 case ORDERED_EXPR:
1969 case UNLT_EXPR:
1970 case UNLE_EXPR:
1971 case UNGT_EXPR:
1972 case UNGE_EXPR:
1973 case UNEQ_EXPR:
1974 return honor_snans;
1975
1976 case CONVERT_EXPR:
1977 case FIX_TRUNC_EXPR:
1978 /* Conversion of floating point might trap. */
1979 return honor_nans;
1980
1981 case NEGATE_EXPR:
1982 case ABS_EXPR:
1983 case CONJ_EXPR:
1984 /* These operations don't trap with floating point. */
1985 if (honor_trapv)
1986 return true;
1987 return false;
1988
1989 case PLUS_EXPR:
1990 case MINUS_EXPR:
1991 case MULT_EXPR:
1992 /* Any floating arithmetic may trap. */
1993 if (fp_operation && flag_trapping_math)
1994 return true;
1995 if (honor_trapv)
1996 return true;
1997 return false;
1998
1999 case CALL_EXPR:
2000 t = get_callee_fndecl (expr);
2001 /* Assume that calls to weak functions may trap. */
2002 if (!t || !DECL_P (t) || DECL_WEAK (t))
2003 return true;
2004 return false;
2005
2006 default:
2007 /* Any floating arithmetic may trap. */
2008 if (fp_operation && flag_trapping_math)
2009 return true;
2010 return false;
2011 }
2012 }
2013
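/* Return true if statement T could throw an exception.  For example, with
   -fexceptions but without -fnon-call-exceptions, a statement such as

     x = foo (y);

   could throw unless foo is known to be nothrow, whereas

     x = *p;

   only becomes a potential throw site once -fnon-call-exceptions is enabled
   and the load itself could trap.  */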
2014 bool
2015 tree_could_throw_p (tree t)
2016 {
2017 if (!flag_exceptions)
2018 return false;
2019 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
2020 {
2021 if (flag_non_call_exceptions
2022 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
2023 return true;
2024 t = GIMPLE_STMT_OPERAND (t, 1);
2025 }
2026
2027 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2028 t = TREE_OPERAND (t, 0);
2029 if (TREE_CODE (t) == CALL_EXPR)
2030 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2031 if (flag_non_call_exceptions)
2032 return tree_could_trap_p (t);
2033 return false;
2034 }
2035
2036 bool
2037 tree_can_throw_internal (const_tree stmt)
2038 {
2039 int region_nr;
2040 bool is_resx = false;
2041
2042 if (TREE_CODE (stmt) == RESX_EXPR)
2043 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2044 else
2045 region_nr = lookup_stmt_eh_region (stmt);
2046 if (region_nr < 0)
2047 return false;
2048 return can_throw_internal_1 (region_nr, is_resx);
2049 }
2050
2051 bool
2052 tree_can_throw_external (tree stmt)
2053 {
2054 int region_nr;
2055 bool is_resx = false;
2056
2057 if (TREE_CODE (stmt) == RESX_EXPR)
2058 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2059 else
2060 region_nr = lookup_stmt_eh_region (stmt);
2061 if (region_nr < 0)
2062 return tree_could_throw_p (stmt);
2063 else
2064 return can_throw_external_1 (region_nr, is_resx);
2065 }
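/* Roughly, tree_can_throw_internal above asks whether an exception raised by
   STMT can be caught by a handler within the current function (so the
   statement needs outgoing EH edges), while tree_can_throw_external asks
   whether such an exception could escape the function altogether, e.g.

     try { might_throw (); } catch (...) { }    -- internal only
     might_throw ();                            -- external only

   assuming might_throw is not known to be nothrow.  */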
2066
2067 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2068 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2069 in the table if it should be in there. Return TRUE if a replacement was
2070 done that may require an EH edge purge. */
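/* A sketch of a typical use, assuming the caller folds or rebuilds a
   statement in place and uses tree_purge_dead_eh_edges from tree-cfg.c to
   remove edges that are no longer needed:

     tree old_stmt = stmt;
     fold_stmt (&stmt);
     ...
     if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
         && tree_purge_dead_eh_edges (bb))
       cfg_changed = true;  */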
2071
2072 bool
2073 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2074 {
2075 int region_nr = lookup_stmt_eh_region (old_stmt);
2076
2077 if (region_nr >= 0)
2078 {
2079 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2080
2081 if (new_stmt == old_stmt && new_stmt_could_throw)
2082 return false;
2083
2084 remove_stmt_from_eh_region (old_stmt);
2085 if (new_stmt_could_throw)
2086 {
2087 add_stmt_to_eh_region (new_stmt, region_nr);
2088 return false;
2089 }
2090 else
2091 return true;
2092 }
2093
2094 return false;
2095 }
2096 \f
2097 /* Returns TRUE if oneh and twoh are exception handlers (op 1 of
2098 TRY_CATCH_EXPR or TRY_FINALLY_EXPR) that are similar enough to be
2099 considered the same. Currently this only handles handlers consisting of
2100 a single call, as that's the important case for C++: a destructor call
2101 for a particular object showing up in multiple handlers. */
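/* In the common C++ case, when a local variable A is initialized using a
   temporary B, the destructor call for A tends to appear both in the handler
   guarding the destruction of B and in the handler covering the remainder of
   the scope; those two single-call handlers compare equal here.  */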
2102
2103 static bool
2104 same_handler_p (tree oneh, tree twoh)
2105 {
2106 tree_stmt_iterator i;
2107 tree ones, twos;
2108 int ai;
2109
2110 i = tsi_start (oneh);
2111 if (!tsi_one_before_end_p (i))
2112 return false;
2113 ones = tsi_stmt (i);
2114
2115 i = tsi_start (twoh);
2116 if (!tsi_one_before_end_p (i))
2117 return false;
2118 twos = tsi_stmt (i);
2119
2120 if (TREE_CODE (ones) != CALL_EXPR
2121 || TREE_CODE (twos) != CALL_EXPR
2122 || !operand_equal_p (CALL_EXPR_FN (ones), CALL_EXPR_FN (twos), 0)
2123 || call_expr_nargs (ones) != call_expr_nargs (twos))
2124 return false;
2125
2126 for (ai = 0; ai < call_expr_nargs (ones); ++ai)
2127 if (!operand_equal_p (CALL_EXPR_ARG (ones, ai),
2128 CALL_EXPR_ARG (twos, ai), 0))
2129 return false;
2130
2131 return true;
2132 }
2133
2134 /* Optimize
2135 try { A() } finally { try { ~B() } catch { ~A() } }
2136 try { ... } finally { ~A() }
2137 into
2138 try { A() } catch { ~B() }
2139 try { ~B() ... } finally { ~A() }
2140
2141 This occurs frequently in C++, where A is a local variable and B is a
2142 temporary used in the initializer for A. */
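/* As a sketch of the source that produces this shape, consider

     {
       A a = B ();    // B is a temporary converted to initialize a
       ...
     }

   The inner finally destroys B; its catch destroys a in case ~B() throws
   after a has already been constructed; the second try/finally destroys a
   when the rest of the scope finishes or throws.  After the transformation
   the ~A() cleanup is emitted only once.  */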
2143
2144 static void
2145 optimize_double_finally (tree one, tree two)
2146 {
2147 tree oneh;
2148 tree_stmt_iterator i;
2149
2150 i = tsi_start (TREE_OPERAND (one, 1));
2151 if (!tsi_one_before_end_p (i))
2152 return;
2153
2154 oneh = tsi_stmt (i);
2155 if (TREE_CODE (oneh) != TRY_CATCH_EXPR)
2156 return;
2157
2158 if (same_handler_p (TREE_OPERAND (oneh, 1), TREE_OPERAND (two, 1)))
2159 {
2160 tree twoh;
2161
2162 tree b = TREE_OPERAND (oneh, 0);
2163 TREE_OPERAND (one, 1) = b;
2164 TREE_SET_CODE (one, TRY_CATCH_EXPR);
2165
2166 b = tsi_stmt (tsi_start (b));
2167 twoh = TREE_OPERAND (two, 0);
2168 /* same_handler_p only handles single-statement handlers,
2169 so there must be exactly one statement. */
2170 i = tsi_start (twoh);
2171 tsi_link_before (&i, unshare_expr (b), TSI_SAME_STMT);
2172 }
2173 }
2174
2175 /* Perform EH refactoring optimizations that are simpler to do when code
2176 flow has been lowered but EH structures haven't. */
2177
2178 static void
2179 refactor_eh_r (tree t)
2180 {
2181 tailrecurse:
2182 switch (TREE_CODE (t))
2183 {
2184 case TRY_FINALLY_EXPR:
2185 case TRY_CATCH_EXPR:
2186 refactor_eh_r (TREE_OPERAND (t, 0));
2187 t = TREE_OPERAND (t, 1);
2188 goto tailrecurse;
2189
2190 case CATCH_EXPR:
2191 t = CATCH_BODY (t);
2192 goto tailrecurse;
2193
2194 case EH_FILTER_EXPR:
2195 t = EH_FILTER_FAILURE (t);
2196 goto tailrecurse;
2197
2198 case STATEMENT_LIST:
2199 {
2200 tree_stmt_iterator i;
2201 tree one = NULL_TREE, two = NULL_TREE;
2202 /* Try to refactor double try/finally. */
2203 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2204 {
2205 one = two;
2206 two = tsi_stmt (i);
2207 if (one && two
2208 && TREE_CODE (one) == TRY_FINALLY_EXPR
2209 && TREE_CODE (two) == TRY_FINALLY_EXPR)
2210 optimize_double_finally (one, two);
2211 if (one)
2212 refactor_eh_r (one);
2213 }
2214 if (two)
2215 {
2216 t = two;
2217 goto tailrecurse;
2218 }
2219 }
2220 break;
2221
2222 default:
2223 /* A type, a decl, or some kind of statement that we're not
2224 interested in. Don't walk them. */
2225 break;
2226 }
2227 }
2228
2229 static unsigned
2230 refactor_eh (void)
2231 {
2232 refactor_eh_r (DECL_SAVED_TREE (current_function_decl));
2233 return 0;
2234 }
2235
2236 struct tree_opt_pass pass_refactor_eh =
2237 {
2238 "ehopt", /* name */
2239 NULL, /* gate */
2240 refactor_eh, /* execute */
2241 NULL, /* sub */
2242 NULL, /* next */
2243 0, /* static_pass_number */
2244 TV_TREE_EH, /* tv_id */
2245 PROP_gimple_lcf, /* properties_required */
2246 0, /* properties_provided */
2247 0, /* properties_destroyed */
2248 0, /* todo_flags_start */
2249 TODO_dump_func, /* todo_flags_finish */
2250 0 /* letter */
2251 };