re PR middle-end/23401 (Gimplifier produces too many temporaries)
[gcc.git] / gcc / tree-eh.c
1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "except.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "langhooks.h"
38 #include "ggc.h"
39 #include "toplev.h"
40 #include "gimple.h"
41
42 /* In some instances a tree and a gimple need to be stored in a same table,
43 i.e. in hash tables. This is a structure to do this. */
44 typedef union {tree *tp; tree t; gimple g;} treemple;
45
46 /* Nonzero if we are using EH to handle cleanups. */
47 static int using_eh_for_cleanups_p = 0;
48
49 void
50 using_eh_for_cleanups (void)
51 {
52 using_eh_for_cleanups_p = 1;
53 }
54
55 /* Misc functions used in this file. */
56
57 /* Compare and hash for any structure which begins with a canonical
58 pointer. Assumes all pointers are interchangeable, which is sort
59 of already assumed by gcc elsewhere IIRC. */
60
61 static int
62 struct_ptr_eq (const void *a, const void *b)
63 {
64 const void * const * x = (const void * const *) a;
65 const void * const * y = (const void * const *) b;
66 return *x == *y;
67 }
68
69 static hashval_t
70 struct_ptr_hash (const void *a)
71 {
72 const void * const * x = (const void * const *) a;
73 return (size_t)*x >> 4;
74 }
75
76
77 /* Remember and lookup EH region data for arbitrary statements.
78 Really this means any statement that could_throw_p. We could
79 stuff this information into the stmt_ann data structure, but:
80
81 (1) We absolutely rely on this information being kept until
82 we get to rtl. Once we're done with lowering here, if we lose
83 the information there's no way to recover it!
84
85 (2) There are many more statements that *cannot* throw as
86 compared to those that can. We should be saving some amount
87 of space by only allocating memory for those that can throw. */
88
89 static void
90 record_stmt_eh_region (struct eh_region *region, gimple t)
91 {
92 if (!region)
93 return;
94
95 add_stmt_to_eh_region (t, get_eh_region_number (region));
96 }
97
98
99 /* Add statement T in function IFUN to EH region NUM. */
100
101 void
102 add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
103 {
104 struct throw_stmt_node *n;
105 void **slot;
106
107 gcc_assert (num >= 0);
108 gcc_assert (gimple_code (t) != GIMPLE_RESX);
109
110 n = GGC_NEW (struct throw_stmt_node);
111 n->stmt = t;
112 n->region_nr = num;
113
114 if (!get_eh_throw_stmt_table (ifun))
115 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
116 struct_ptr_eq,
117 ggc_free));
118
119 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
120 gcc_assert (!*slot);
121 *slot = n;
122 }
123
124
125 /* Add statement T in the current function (cfun) to EH region number
126 NUM. */
127
128 void
129 add_stmt_to_eh_region (gimple t, int num)
130 {
131 add_stmt_to_eh_region_fn (cfun, t, num);
132 }
133
134
135 /* Remove statement T in function IFUN from the EH region holding it. */
136
137 bool
138 remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
139 {
140 struct throw_stmt_node dummy;
141 void **slot;
142
143 if (!get_eh_throw_stmt_table (ifun))
144 return false;
145
146 dummy.stmt = t;
147 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
148 NO_INSERT);
149 if (slot)
150 {
151 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
152 return true;
153 }
154 else
155 return false;
156 }
157
158
159 /* Remove statement T in the current function (cfun) from the EH
160 region holding it. */
161
162 bool
163 remove_stmt_from_eh_region (gimple t)
164 {
165 return remove_stmt_from_eh_region_fn (cfun, t);
166 }
167
168 /* Determine if statement T is inside an EH region in function IFUN.
169 Return the EH region number if found, return -2 if IFUN does not
170 have an EH table and -1 if T could not be found in IFUN's EH region
171 table. */
172
173 int
174 lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
175 {
176 struct throw_stmt_node *p, n;
177
178 if (!get_eh_throw_stmt_table (ifun))
179 return -2;
180
181 n.stmt = t;
182 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
183 return (p ? p->region_nr : -1);
184 }
185
186
187 /* Determine if statement T is inside an EH region in the current
188 function (cfun). Return the EH region number if found, return -2
189 if cfun does not have an EH table and -1 if T could not be found in
190 cfun's EH region table. */
191
192 int
193 lookup_stmt_eh_region (gimple t)
194 {
195 /* We can get called from initialized data when -fnon-call-exceptions
196 is on; prevent crash. */
197 if (!cfun)
198 return -1;
199
200 return lookup_stmt_eh_region_fn (cfun, t);
201 }
202
203
204 /* Determine if expression T is inside an EH region in the current
205 function (cfun). Return the EH region number if found, return -2
206 if IFUN does not have an EH table and -1 if T could not be found in
207 IFUN's EH region table. */
208
209 int
210 lookup_expr_eh_region (tree t)
211 {
212 /* We can get called from initialized data when -fnon-call-exceptions
213 is on; prevent crash. */
214 if (!cfun)
215 return -1;
216
217 if (!get_eh_throw_stmt_table (cfun))
218 return -2;
219
220 if (t && EXPR_P (t))
221 {
222 tree_ann_common_t ann = tree_common_ann (t);
223 if (ann)
224 return (int) ann->rn;
225 }
226
227 return -1;
228 }
229
230
231 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
232 nodes and LABEL_DECL nodes. We will use this during the second phase to
233 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
234
235 struct finally_tree_node
236 {
237 /* When storing a GIMPLE_TRY, we have to record a gimple. However
238 when deciding whether a GOTO to a certain LABEL_DECL (which is a
239 tree) leaves the TRY block, its necessary to record a tree in
240 this field. Thus a treemple is used. */
241 treemple child;
242 gimple parent;
243 };
244
245 /* Note that this table is *not* marked GTY. It is short-lived. */
246 static htab_t finally_tree;
247
248 static void
249 record_in_finally_tree (treemple child, gimple parent)
250 {
251 struct finally_tree_node *n;
252 void **slot;
253
254 n = XNEW (struct finally_tree_node);
255 n->child = child;
256 n->parent = parent;
257
258 slot = htab_find_slot (finally_tree, n, INSERT);
259 gcc_assert (!*slot);
260 *slot = n;
261 }
262
263 static void
264 collect_finally_tree (gimple stmt, gimple region);
265
266 /* Go through the gimple sequence. Works with collect_finally_tree to
267 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
268
269 static void
270 collect_finally_tree_1 (gimple_seq seq, gimple region)
271 {
272 gimple_stmt_iterator gsi;
273
274 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
275 collect_finally_tree (gsi_stmt (gsi), region);
276 }
277
278 static void
279 collect_finally_tree (gimple stmt, gimple region)
280 {
281 treemple temp;
282
283 switch (gimple_code (stmt))
284 {
285 case GIMPLE_LABEL:
286 temp.t = gimple_label_label (stmt);
287 record_in_finally_tree (temp, region);
288 break;
289
290 case GIMPLE_TRY:
291 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
292 {
293 temp.g = stmt;
294 record_in_finally_tree (temp, region);
295 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
296 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
297 }
298 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
299 {
300 collect_finally_tree_1 (gimple_try_eval (stmt), region);
301 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
302 }
303 break;
304
305 case GIMPLE_CATCH:
306 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
307 break;
308
309 case GIMPLE_EH_FILTER:
310 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
311 break;
312
313 default:
314 /* A type, a decl, or some kind of statement that we're not
315 interested in. Don't walk them. */
316 break;
317 }
318 }
319
320
321 /* Use the finally tree to determine if a jump from START to TARGET
322 would leave the try_finally node that START lives in. */
323
324 static bool
325 outside_finally_tree (treemple start, gimple target)
326 {
327 struct finally_tree_node n, *p;
328
329 do
330 {
331 n.child = start;
332 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
333 if (!p)
334 return true;
335 start.g = p->parent;
336 }
337 while (start.g != target);
338
339 return false;
340 }
341
342 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
343 nodes into a set of gotos, magic labels, and eh regions.
344 The eh region creation is straight-forward, but frobbing all the gotos
345 and such into shape isn't. */
346
347 /* State of the world while lowering. */
348
349 struct leh_state
350 {
351 /* What's "current" while constructing the eh region tree. These
352 correspond to variables of the same name in cfun->eh, which we
353 don't have easy access to. */
354 struct eh_region *cur_region;
355 struct eh_region *prev_try;
356
357 /* Processing of TRY_FINALLY requires a bit more state. This is
358 split out into a separate structure so that we don't have to
359 copy so much when processing other nodes. */
360 struct leh_tf_state *tf;
361 };
362
363 struct leh_tf_state
364 {
365 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
366 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
367 this so that outside_finally_tree can reliably reference the tree used
368 in the collect_finally_tree data structures. */
369 gimple try_finally_expr;
370 gimple top_p;
371 /* While lowering a top_p usually it is expanded into multiple statements,
372 thus we need the following field to store them. */
373 gimple_seq top_p_seq;
374
375 /* The state outside this try_finally node. */
376 struct leh_state *outer;
377
378 /* The exception region created for it. */
379 struct eh_region *region;
380
381 /* The GOTO_QUEUE is is an array of GIMPLE_GOTO and GIMPLE_RETURN statements
382 that are seen to escape this GIMPLE_TRY_FINALLY node.
383 The idea is to record a gimple statement for everything except for
384 the conditionals, which get their labels recorded. Since labels are of
385 type 'tree', we need this node to store both gimple and tree objects.
386 REPL_STMT is the sequence used to replace the goto/return statement.
387 CONT_STMT is used to store the statement that allows the return/goto to
388 jump to the original destination. */
389 struct goto_queue_node {
390 treemple stmt;
391 gimple_seq repl_stmt;
392 gimple cont_stmt;
393 int index;
394 /* this is used when index >= 0 to indicate that stmt is a label(as
395 opposed to a goto stmt) */
396 int is_label;
397 } *goto_queue;
398 size_t goto_queue_size;
399 size_t goto_queue_active;
400
401 /* Pointer map to help in searching goto_queue when it is large. */
402 struct pointer_map_t *goto_queue_map;
403
404 /* The set of unique labels seen as entries in the goto queue. */
405 VEC(tree,heap) *dest_array;
406
407 /* A label to be added at the end of the completed transformed
408 sequence. It will be set if may_fallthru was true *at one time*,
409 though subsequent transformations may have cleared that flag. */
410 tree fallthru_label;
411
412 /* A label that has been registered with except.c to be the
413 landing pad for this try block. */
414 tree eh_label;
415
416 /* True if it is possible to fall out the bottom of the try block.
417 Cleared if the fallthru is converted to a goto. */
418 bool may_fallthru;
419
420 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
421 bool may_return;
422
423 /* True if the finally block can receive an exception edge.
424 Cleared if the exception case is handled by code duplication. */
425 bool may_throw;
426 };
427
428 static gimple_seq lower_eh_filter (struct leh_state *, gimple);
429
430 /* Search for STMT in the goto queue. Return the replacement,
431 or null if the statement isn't in the queue. */
432
433 #define LARGE_GOTO_QUEUE 20
434
435 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
436
437 static gimple_seq
438 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
439 {
440 unsigned int i;
441 void **slot;
442
443 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
444 {
445 for (i = 0; i < tf->goto_queue_active; i++)
446 if ( tf->goto_queue[i].stmt.g == stmt.g)
447 return tf->goto_queue[i].repl_stmt;
448 return NULL;
449 }
450
451 /* If we have a large number of entries in the goto_queue, create a
452 pointer map and use that for searching. */
453
454 if (!tf->goto_queue_map)
455 {
456 tf->goto_queue_map = pointer_map_create ();
457 for (i = 0; i < tf->goto_queue_active; i++)
458 {
459 slot = pointer_map_insert (tf->goto_queue_map,
460 tf->goto_queue[i].stmt.g);
461 gcc_assert (*slot == NULL);
462 *slot = &tf->goto_queue[i];
463 }
464 }
465
466 slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
467 if (slot != NULL)
468 return (((struct goto_queue_node *) *slot)->repl_stmt);
469
470 return NULL;
471 }
472
473 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
474 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
475 then we can just splat it in, otherwise we add the new stmts immediately
476 after the GIMPLE_COND and redirect. */
477
478 static void
479 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
480 gimple_stmt_iterator *gsi)
481 {
482 tree label;
483 gimple_seq new_seq;
484 treemple temp;
485
486 temp.tp = tp;
487 new_seq = find_goto_replacement (tf, temp);
488 if (!new_seq)
489 return;
490
491 if (gimple_seq_singleton_p (new_seq)
492 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
493 {
494 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
495 return;
496 }
497
498 label = create_artificial_label ();
499 /* Set the new label for the GIMPLE_COND */
500 *tp = label;
501
502 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
503 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
504 }
505
506 /* The real work of replace_goto_queue. Returns with TSI updated to
507 point to the next statement. */
508
509 static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);
510
511 static void
512 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
513 gimple_stmt_iterator *gsi)
514 {
515 gimple_seq seq;
516 treemple temp;
517 temp.g = NULL;
518
519 switch (gimple_code (stmt))
520 {
521 case GIMPLE_GOTO:
522 case GIMPLE_RETURN:
523 temp.g = stmt;
524 seq = find_goto_replacement (tf, temp);
525 if (seq)
526 {
527 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
528 gsi_remove (gsi, false);
529 return;
530 }
531 break;
532
533 case GIMPLE_COND:
534 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
535 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
536 break;
537
538 case GIMPLE_TRY:
539 replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
540 replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
541 break;
542 case GIMPLE_CATCH:
543 replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
544 break;
545 case GIMPLE_EH_FILTER:
546 replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
547 break;
548
549 default:
550 /* These won't have gotos in them. */
551 break;
552 }
553
554 gsi_next (gsi);
555 }
556
557 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
558
559 static void
560 replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
561 {
562 gimple_stmt_iterator gsi = gsi_start (seq);
563
564 while (!gsi_end_p (gsi))
565 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
566 }
567
568 /* Replace all goto queue members. */
569
570 static void
571 replace_goto_queue (struct leh_tf_state *tf)
572 {
573 if (tf->goto_queue_active == 0)
574 return;
575 replace_goto_queue_stmt_list (tf->top_p_seq, tf);
576 }
577
578 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
579 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
580 a gimple return. */
581
582 static void
583 record_in_goto_queue (struct leh_tf_state *tf,
584 treemple new_stmt,
585 int index,
586 bool is_label)
587 {
588 size_t active, size;
589 struct goto_queue_node *q;
590
591 gcc_assert (!tf->goto_queue_map);
592
593 active = tf->goto_queue_active;
594 size = tf->goto_queue_size;
595 if (active >= size)
596 {
597 size = (size ? size * 2 : 32);
598 tf->goto_queue_size = size;
599 tf->goto_queue
600 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
601 }
602
603 q = &tf->goto_queue[active];
604 tf->goto_queue_active = active + 1;
605
606 memset (q, 0, sizeof (*q));
607 q->stmt = new_stmt;
608 q->index = index;
609 q->is_label = is_label;
610 }
611
612 /* Record the LABEL label in the goto queue contained in TF.
613 TF is not null. */
614
615 static void
616 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
617 {
618 int index;
619 treemple temp, new_stmt;
620
621 if (!label)
622 return;
623
624 /* Computed and non-local gotos do not get processed. Given
625 their nature we can neither tell whether we've escaped the
626 finally block nor redirect them if we knew. */
627 if (TREE_CODE (label) != LABEL_DECL)
628 return;
629
630 /* No need to record gotos that don't leave the try block. */
631 temp.t = label;
632 if (!outside_finally_tree (temp, tf->try_finally_expr))
633 return;
634
635 if (! tf->dest_array)
636 {
637 tf->dest_array = VEC_alloc (tree, heap, 10);
638 VEC_quick_push (tree, tf->dest_array, label);
639 index = 0;
640 }
641 else
642 {
643 int n = VEC_length (tree, tf->dest_array);
644 for (index = 0; index < n; ++index)
645 if (VEC_index (tree, tf->dest_array, index) == label)
646 break;
647 if (index == n)
648 VEC_safe_push (tree, heap, tf->dest_array, label);
649 }
650
651 /* In the case of a GOTO we want to record the destination label,
652 since with a GIMPLE_COND we have an easy access to the then/else
653 labels. */
654 new_stmt = stmt;
655 record_in_goto_queue (tf, new_stmt, index, true);
656
657 }
658
659 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
660 node, and if so record that fact in the goto queue associated with that
661 try_finally node. */
662
663 static void
664 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
665 {
666 struct leh_tf_state *tf = state->tf;
667 treemple new_stmt;
668
669 if (!tf)
670 return;
671
672 switch (gimple_code (stmt))
673 {
674 case GIMPLE_COND:
675 new_stmt.tp = gimple_op_ptr (stmt, 2);
676 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
677 new_stmt.tp = gimple_op_ptr (stmt, 3);
678 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
679 break;
680 case GIMPLE_GOTO:
681 new_stmt.g = stmt;
682 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
683 break;
684
685 case GIMPLE_RETURN:
686 tf->may_return = true;
687 new_stmt.g = stmt;
688 record_in_goto_queue (tf, new_stmt, -1, false);
689 break;
690
691 default:
692 gcc_unreachable ();
693 }
694 }
695
696
697 #ifdef ENABLE_CHECKING
698 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
699 was in fact structured, and we've not yet done jump threading, then none
700 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
701
702 static void
703 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
704 {
705 struct leh_tf_state *tf = state->tf;
706 size_t i, n;
707
708 if (!tf)
709 return;
710
711 n = gimple_switch_num_labels (switch_expr);
712
713 for (i = 0; i < n; ++i)
714 {
715 treemple temp;
716 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
717 temp.t = lab;
718 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
719 }
720 }
721 #else
722 #define verify_norecord_switch_expr(state, switch_expr)
723 #endif
724
725 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
726 whatever is needed to finish the return. If MOD is non-null, insert it
727 before the new branch. RETURN_VALUE_P is a cache containing a temporary
728 variable to be used in manipulating the value returned from the function. */
729
730 static void
731 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
732 tree *return_value_p)
733 {
734 tree ret_expr;
735 gimple x;
736
737 /* In the case of a return, the queue node must be a gimple statement. */
738 gcc_assert (!q->is_label);
739
740 ret_expr = gimple_return_retval (q->stmt.g);
741
742 if (ret_expr)
743 {
744 if (!*return_value_p)
745 *return_value_p = ret_expr;
746 else
747 gcc_assert (*return_value_p == ret_expr);
748 q->cont_stmt = q->stmt.g;
749 /* The nasty part about redirecting the return value is that the
750 return value itself is to be computed before the FINALLY block
751 is executed. e.g.
752
753 int x;
754 int foo (void)
755 {
756 x = 0;
757 try {
758 return x;
759 } finally {
760 x++;
761 }
762 }
763
764 should return 0, not 1. Arrange for this to happen by copying
765 computed the return value into a local temporary. This also
766 allows us to redirect multiple return statements through the
767 same destination block; whether this is a net win or not really
768 depends, I guess, but it does make generation of the switch in
769 lower_try_finally_switch easier. */
770
771 if (TREE_CODE (ret_expr) == RESULT_DECL)
772 {
773 if (!*return_value_p)
774 *return_value_p = ret_expr;
775 else
776 gcc_assert (*return_value_p == ret_expr);
777 q->cont_stmt = q->stmt.g;
778 }
779 else
780 gcc_unreachable ();
781 }
782 else
783 /* If we don't return a value, all return statements are the same. */
784 q->cont_stmt = q->stmt.g;
785
786 if (!q->repl_stmt)
787 q->repl_stmt = gimple_seq_alloc ();
788
789 if (mod)
790 gimple_seq_add_seq (&q->repl_stmt, mod);
791
792 x = gimple_build_goto (finlab);
793 gimple_seq_add_stmt (&q->repl_stmt, x);
794 }
795
796 /* Similar, but easier, for GIMPLE_GOTO. */
797
798 static void
799 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
800 struct leh_tf_state *tf)
801 {
802 gimple x;
803
804 gcc_assert (q->is_label);
805 if (!q->repl_stmt)
806 q->repl_stmt = gimple_seq_alloc ();
807
808 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array,q->index));
809
810 if (mod)
811 gimple_seq_add_seq (&q->repl_stmt, mod);
812
813 x = gimple_build_goto (finlab);
814 gimple_seq_add_stmt (&q->repl_stmt, x);
815 }
816
817 /* We want to transform
818 try { body; } catch { stuff; }
819 to
820 body; goto over; lab: stuff; over:
821
822 TP is a GIMPLE_TRY node. LAB is the label that
823 should be placed before the second operand, or NULL. OVER is
824 an existing label that should be put at the exit, or NULL. */
825
826 static gimple_seq
827 frob_into_branch_around (gimple tp, tree lab, tree over)
828 {
829 gimple x;
830 gimple_seq cleanup, result;
831
832 cleanup = gimple_try_cleanup (tp);
833 result = gimple_try_eval (tp);
834
835 if (gimple_seq_may_fallthru (result))
836 {
837 if (!over)
838 over = create_artificial_label ();
839 x = gimple_build_goto (over);
840 gimple_seq_add_stmt (&result, x);
841 }
842
843 if (lab)
844 {
845 x = gimple_build_label (lab);
846 gimple_seq_add_stmt (&result, x);
847 }
848
849 gimple_seq_add_seq (&result, cleanup);
850
851 if (over)
852 {
853 x = gimple_build_label (over);
854 gimple_seq_add_stmt (&result, x);
855 }
856 return result;
857 }
858
859 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
860 Make sure to record all new labels found. */
861
862 static gimple_seq
863 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
864 {
865 gimple region = NULL;
866 gimple_seq new_seq;
867
868 new_seq = copy_gimple_seq_and_replace_locals (seq);
869
870 if (outer_state->tf)
871 region = outer_state->tf->try_finally_expr;
872 collect_finally_tree_1 (new_seq, region);
873
874 return new_seq;
875 }
876
877 /* A subroutine of lower_try_finally. Create a fallthru label for
878 the given try_finally state. The only tricky bit here is that
879 we have to make sure to record the label in our outer context. */
880
881 static tree
882 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
883 {
884 tree label = tf->fallthru_label;
885 treemple temp;
886
887 if (!label)
888 {
889 label = create_artificial_label ();
890 tf->fallthru_label = label;
891 if (tf->outer->tf)
892 {
893 temp.t = label;
894 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
895 }
896 }
897 return label;
898 }
899
900 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
901 returns non-null, then the language requires that the exception path out
902 of a try_finally be treated specially. To wit: the code within the
903 finally block may not itself throw an exception. We have two choices here.
904 First we can duplicate the finally block and wrap it in a must_not_throw
905 region. Second, we can generate code like
906
907 try {
908 finally_block;
909 } catch {
910 if (fintmp == eh_edge)
911 protect_cleanup_actions;
912 }
913
914 where "fintmp" is the temporary used in the switch statement generation
915 alternative considered below. For the nonce, we always choose the first
916 option.
917
918 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
919
920 static void
921 honor_protect_cleanup_actions (struct leh_state *outer_state,
922 struct leh_state *this_state,
923 struct leh_tf_state *tf)
924 {
925 gimple protect_cleanup_actions;
926 gimple_stmt_iterator gsi;
927 bool finally_may_fallthru;
928 gimple_seq finally;
929 gimple x;
930
931 /* First check for nothing to do. */
932 if (lang_protect_cleanup_actions)
933 protect_cleanup_actions = lang_protect_cleanup_actions ();
934 else
935 protect_cleanup_actions = NULL;
936
937 finally = gimple_try_cleanup (tf->top_p);
938
939 /* If the EH case of the finally block can fall through, this may be a
940 structure of the form
941 try {
942 try {
943 throw ...;
944 } cleanup {
945 try {
946 throw ...;
947 } catch (...) {
948 }
949 }
950 } catch (...) {
951 yyy;
952 }
953 E.g. with an inline destructor with an embedded try block. In this
954 case we must save the runtime EH data around the nested exception.
955
956 This complication means that any time the previous runtime data might
957 be used (via fallthru from the finally) we handle the eh case here,
958 whether or not protect_cleanup_actions is active. */
959
960 finally_may_fallthru = gimple_seq_may_fallthru (finally);
961 if (!finally_may_fallthru && !protect_cleanup_actions)
962 return;
963
964 /* Duplicate the FINALLY block. Only need to do this for try-finally,
965 and not for cleanups. */
966 if (this_state)
967 finally = lower_try_finally_dup_block (finally, outer_state);
968
969 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
970 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
971 to be in an enclosing scope, but needs to be implemented at this level
972 to avoid a nesting violation (see wrap_temporary_cleanups in
973 cp/decl.c). Since it's logically at an outer level, we should call
974 terminate before we get to it, so strip it away before adding the
975 MUST_NOT_THROW filter. */
976 gsi = gsi_start (finally);
977 x = gsi_stmt (gsi);
978 if (protect_cleanup_actions
979 && gimple_code (x) == GIMPLE_TRY
980 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
981 && gimple_try_catch_is_cleanup (x))
982 {
983 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
984 gsi_remove (&gsi, false);
985 }
986
987 /* Resume execution after the exception. Adding this now lets
988 lower_eh_filter not add unnecessary gotos, as it is clear that
989 we never fallthru from this copy of the finally block. */
990 if (finally_may_fallthru)
991 {
992 tree save_eptr, save_filt;
993 tree tmp;
994
995 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
996 save_filt = create_tmp_var (integer_type_node, "save_filt");
997
998 gsi = gsi_start (finally);
999 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1000 x = gimple_build_assign (save_eptr, tmp);
1001 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1002
1003 tmp = build0 (FILTER_EXPR, integer_type_node);
1004 x = gimple_build_assign (save_filt, tmp);
1005 gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
1006
1007 gsi = gsi_last (finally);
1008 tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
1009 x = gimple_build_assign (tmp, save_eptr);
1010 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1011
1012 tmp = build0 (FILTER_EXPR, integer_type_node);
1013 x = gimple_build_assign (tmp, save_filt);
1014 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1015
1016 x = gimple_build_resx (get_eh_region_number (tf->region));
1017 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1018 }
1019
1020 /* Wrap the block with protect_cleanup_actions as the action. */
1021 if (protect_cleanup_actions)
1022 {
1023 gimple_seq seq = NULL, failure = NULL;
1024
1025 gimple_seq_add_stmt (&failure, protect_cleanup_actions);
1026 x = gimple_build_eh_filter (NULL, failure);
1027 gimple_eh_filter_set_must_not_throw (x, 1);
1028
1029 gimple_seq_add_stmt (&seq, x);
1030 x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
1031 finally = lower_eh_filter (outer_state, x);
1032 }
1033 else
1034 lower_eh_constructs_1 (outer_state, finally);
1035
1036 /* Hook this up to the end of the existing try block. If we
1037 previously fell through the end, we'll have to branch around.
1038 This means adding a new goto, and adding it to the queue. */
1039
1040 gsi = gsi_last (gimple_try_eval (tf->top_p));
1041
1042 if (tf->may_fallthru)
1043 {
1044 tree tmp;
1045 tmp = lower_try_finally_fallthru_label (tf);
1046 x = gimple_build_goto (tmp);
1047 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1048
1049 if (this_state)
1050 maybe_record_in_goto_queue (this_state, x);
1051
1052 tf->may_fallthru = false;
1053 }
1054
1055 x = gimple_build_label (tf->eh_label);
1056 gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
1057 gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);
1058
1059 /* Having now been handled, EH isn't to be considered with
1060 the rest of the outgoing edges. */
1061 tf->may_throw = false;
1062 }
1063
1064 /* A subroutine of lower_try_finally. We have determined that there is
1065 no fallthru edge out of the finally block. This means that there is
1066 no outgoing edge corresponding to any incoming edge. Restructure the
1067 try_finally node for this special case. */
1068
1069 static void
1070 lower_try_finally_nofallthru (struct leh_state *state,
1071 struct leh_tf_state *tf)
1072 {
1073 tree lab, return_val;
1074 gimple x;
1075 gimple_seq finally;
1076 struct goto_queue_node *q, *qe;
1077
1078 if (tf->may_throw)
1079 lab = tf->eh_label;
1080 else
1081 lab = create_artificial_label ();
1082
1083 /* We expect that tf->top_p is a GIMPLE_TRY. */
1084 finally = gimple_try_cleanup (tf->top_p);
1085 tf->top_p_seq = gimple_try_eval (tf->top_p);
1086
1087 x = gimple_build_label (lab);
1088 gimple_seq_add_stmt (&tf->top_p_seq, x);
1089
1090 return_val = NULL;
1091 q = tf->goto_queue;
1092 qe = q + tf->goto_queue_active;
1093 for (; q < qe; ++q)
1094 if (q->index < 0)
1095 do_return_redirection (q, lab, NULL, &return_val);
1096 else
1097 do_goto_redirection (q, lab, NULL, tf);
1098
1099 replace_goto_queue (tf);
1100
1101 lower_eh_constructs_1 (state, finally);
1102 gimple_seq_add_seq (&tf->top_p_seq, finally);
1103 }
1104
1105 /* A subroutine of lower_try_finally. We have determined that there is
1106 exactly one destination of the finally block. Restructure the
1107 try_finally node for this special case. */
1108
1109 static void
1110 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1111 {
1112 struct goto_queue_node *q, *qe;
1113 gimple x;
1114 gimple_seq finally;
1115 tree finally_label;
1116
1117 finally = gimple_try_cleanup (tf->top_p);
1118 tf->top_p_seq = gimple_try_eval (tf->top_p);
1119
1120 lower_eh_constructs_1 (state, finally);
1121
1122 if (tf->may_throw)
1123 {
1124 /* Only reachable via the exception edge. Add the given label to
1125 the head of the FINALLY block. Append a RESX at the end. */
1126
1127 x = gimple_build_label (tf->eh_label);
1128 gimple_seq_add_stmt (&tf->top_p_seq, x);
1129
1130 gimple_seq_add_seq (&tf->top_p_seq, finally);
1131
1132 x = gimple_build_resx (get_eh_region_number (tf->region));
1133
1134 gimple_seq_add_stmt (&tf->top_p_seq, x);
1135
1136 return;
1137 }
1138
1139 if (tf->may_fallthru)
1140 {
1141 /* Only reachable via the fallthru edge. Do nothing but let
1142 the two blocks run together; we'll fall out the bottom. */
1143 gimple_seq_add_seq (&tf->top_p_seq, finally);
1144 return;
1145 }
1146
1147 finally_label = create_artificial_label ();
1148 x = gimple_build_label (finally_label);
1149 gimple_seq_add_stmt (&tf->top_p_seq, x);
1150
1151 gimple_seq_add_seq (&tf->top_p_seq, finally);
1152
1153 q = tf->goto_queue;
1154 qe = q + tf->goto_queue_active;
1155
1156 if (tf->may_return)
1157 {
1158 /* Reachable by return expressions only. Redirect them. */
1159 tree return_val = NULL;
1160 for (; q < qe; ++q)
1161 do_return_redirection (q, finally_label, NULL, &return_val);
1162 replace_goto_queue (tf);
1163 }
1164 else
1165 {
1166 /* Reachable by goto expressions only. Redirect them. */
1167 for (; q < qe; ++q)
1168 do_goto_redirection (q, finally_label, NULL, tf);
1169 replace_goto_queue (tf);
1170
1171 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1172 {
1173 /* Reachable by goto to fallthru label only. Redirect it
1174 to the new label (already created, sadly), and do not
1175 emit the final branch out, or the fallthru label. */
1176 tf->fallthru_label = NULL;
1177 return;
1178 }
1179 }
1180
1181 /* Place the original return/goto to the original destination
1182 immediately after the finally block. */
1183 x = tf->goto_queue[0].cont_stmt;
1184 gimple_seq_add_stmt (&tf->top_p_seq, x);
1185 maybe_record_in_goto_queue (state, x);
1186 }
1187
1188 /* A subroutine of lower_try_finally. There are multiple edges incoming
1189 and outgoing from the finally block. Implement this by duplicating the
1190 finally block for every destination. */
1191
1192 static void
1193 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1194 {
1195 gimple_seq finally;
1196 gimple_seq new_stmt;
1197 gimple_seq seq;
1198 gimple x;
1199 tree tmp;
1200
1201 finally = gimple_try_cleanup (tf->top_p);
1202 tf->top_p_seq = gimple_try_eval (tf->top_p);
1203 new_stmt = NULL;
1204
1205 if (tf->may_fallthru)
1206 {
1207 seq = lower_try_finally_dup_block (finally, state);
1208 lower_eh_constructs_1 (state, seq);
1209 gimple_seq_add_seq (&new_stmt, seq);
1210
1211 tmp = lower_try_finally_fallthru_label (tf);
1212 x = gimple_build_goto (tmp);
1213 gimple_seq_add_stmt (&new_stmt, x);
1214 }
1215
1216 if (tf->may_throw)
1217 {
1218 x = gimple_build_label (tf->eh_label);
1219 gimple_seq_add_stmt (&new_stmt, x);
1220
1221 seq = lower_try_finally_dup_block (finally, state);
1222 lower_eh_constructs_1 (state, seq);
1223 gimple_seq_add_seq (&new_stmt, seq);
1224
1225 x = gimple_build_resx (get_eh_region_number (tf->region));
1226 gimple_seq_add_stmt (&new_stmt, x);
1227 }
1228
1229 if (tf->goto_queue)
1230 {
1231 struct goto_queue_node *q, *qe;
1232 tree return_val = NULL;
1233 int return_index, index;
1234 struct labels_s
1235 {
1236 struct goto_queue_node *q;
1237 tree label;
1238 } *labels;
1239
1240 return_index = VEC_length (tree, tf->dest_array);
1241 labels = XCNEWVEC (struct labels_s, return_index + 1);
1242
1243 q = tf->goto_queue;
1244 qe = q + tf->goto_queue_active;
1245 for (; q < qe; q++)
1246 {
1247 index = q->index < 0 ? return_index : q->index;
1248
1249 if (!labels[index].q)
1250 labels[index].q = q;
1251 }
1252
1253 for (index = 0; index < return_index + 1; index++)
1254 {
1255 tree lab;
1256
1257 q = labels[index].q;
1258 if (! q)
1259 continue;
1260
1261 lab = labels[index].label = create_artificial_label ();
1262
1263 if (index == return_index)
1264 do_return_redirection (q, lab, NULL, &return_val);
1265 else
1266 do_goto_redirection (q, lab, NULL, tf);
1267
1268 x = gimple_build_label (lab);
1269 gimple_seq_add_stmt (&new_stmt, x);
1270
1271 seq = lower_try_finally_dup_block (finally, state);
1272 lower_eh_constructs_1 (state, seq);
1273 gimple_seq_add_seq (&new_stmt, seq);
1274
1275 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1276 maybe_record_in_goto_queue (state, q->cont_stmt);
1277 }
1278
1279 for (q = tf->goto_queue; q < qe; q++)
1280 {
1281 tree lab;
1282
1283 index = q->index < 0 ? return_index : q->index;
1284
1285 if (labels[index].q == q)
1286 continue;
1287
1288 lab = labels[index].label;
1289
1290 if (index == return_index)
1291 do_return_redirection (q, lab, NULL, &return_val);
1292 else
1293 do_goto_redirection (q, lab, NULL, tf);
1294 }
1295
1296 replace_goto_queue (tf);
1297 free (labels);
1298 }
1299
1300 /* Need to link new stmts after running replace_goto_queue due
1301 to not wanting to process the same goto stmts twice. */
1302 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1303 }
1304
1305 /* A subroutine of lower_try_finally. There are multiple edges incoming
1306 and outgoing from the finally block. Implement this by instrumenting
1307 each incoming edge and creating a switch statement at the end of the
1308 finally block that branches to the appropriate destination. */
1309
1310 static void
1311 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1312 {
1313 struct goto_queue_node *q, *qe;
1314 tree return_val = NULL;
1315 tree finally_tmp, finally_label;
1316 int return_index, eh_index, fallthru_index;
1317 int nlabels, ndests, j, last_case_index;
1318 tree last_case;
1319 VEC (tree,heap) *case_label_vec;
1320 gimple_seq switch_body;
1321 gimple x;
1322 tree tmp;
1323 gimple switch_stmt;
1324 gimple_seq finally;
1325 struct pointer_map_t *cont_map = NULL;
1326
1327 switch_body = gimple_seq_alloc ();
1328
1329 /* Mash the TRY block to the head of the chain. */
1330 finally = gimple_try_cleanup (tf->top_p);
1331 tf->top_p_seq = gimple_try_eval (tf->top_p);
1332
1333 /* Lower the finally block itself. */
1334 lower_eh_constructs_1 (state, finally);
1335
1336 /* Prepare for switch statement generation. */
1337 nlabels = VEC_length (tree, tf->dest_array);
1338 return_index = nlabels;
1339 eh_index = return_index + tf->may_return;
1340 fallthru_index = eh_index + tf->may_throw;
1341 ndests = fallthru_index + tf->may_fallthru;
1342
1343 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1344 finally_label = create_artificial_label ();
1345
1346 /* We use VEC_quick_push on case_label_vec throughout this function,
1347 since we know the size in advance and allocate precisely as muce
1348 space as needed. */
1349 case_label_vec = VEC_alloc (tree, heap, ndests);
1350 last_case = NULL;
1351 last_case_index = 0;
1352
1353 /* Begin inserting code for getting to the finally block. Things
1354 are done in this order to correspond to the sequence the code is
1355 layed out. */
1356
1357 if (tf->may_fallthru)
1358 {
1359 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
1360 fallthru_index));
1361 gimple_seq_add_stmt (&tf->top_p_seq, x);
1362
1363 if (tf->may_throw)
1364 {
1365 x = gimple_build_goto (finally_label);
1366 gimple_seq_add_stmt (&tf->top_p_seq, x);
1367 }
1368
1369
1370 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1371 build_int_cst (NULL_TREE, fallthru_index), NULL,
1372 create_artificial_label ());
1373 VEC_quick_push (tree, case_label_vec, last_case);
1374 last_case_index++;
1375
1376 x = gimple_build_label (CASE_LABEL (last_case));
1377 gimple_seq_add_stmt (&switch_body, x);
1378
1379 tmp = lower_try_finally_fallthru_label (tf);
1380 x = gimple_build_goto (tmp);
1381 gimple_seq_add_stmt (&switch_body, x);
1382 }
1383
1384 if (tf->may_throw)
1385 {
1386 x = gimple_build_label (tf->eh_label);
1387 gimple_seq_add_stmt (&tf->top_p_seq, x);
1388
1389 x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
1390 eh_index));
1391 gimple_seq_add_stmt (&tf->top_p_seq, x);
1392
1393 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1394 build_int_cst (NULL_TREE, eh_index), NULL,
1395 create_artificial_label ());
1396 VEC_quick_push (tree, case_label_vec, last_case);
1397 last_case_index++;
1398
1399 x = gimple_build_label (CASE_LABEL (last_case));
1400 gimple_seq_add_stmt (&switch_body, x);
1401 x = gimple_build_resx (get_eh_region_number (tf->region));
1402 gimple_seq_add_stmt (&switch_body, x);
1403 }
1404
1405 x = gimple_build_label (finally_label);
1406 gimple_seq_add_stmt (&tf->top_p_seq, x);
1407
1408 gimple_seq_add_seq (&tf->top_p_seq, finally);
1409
1410 /* Redirect each incoming goto edge. */
1411 q = tf->goto_queue;
1412 qe = q + tf->goto_queue_active;
1413 j = last_case_index + tf->may_return;
1414 /* Prepare the assignments to finally_tmp that are executed upon the
1415 entrance through a particular edge. */
1416 for (; q < qe; ++q)
1417 {
1418 gimple_seq mod;
1419 int switch_id;
1420 unsigned int case_index;
1421
1422 mod = gimple_seq_alloc ();
1423
1424 if (q->index < 0)
1425 {
1426 x = gimple_build_assign (finally_tmp,
1427 build_int_cst (integer_type_node,
1428 return_index));
1429 gimple_seq_add_stmt (&mod, x);
1430 do_return_redirection (q, finally_label, mod, &return_val);
1431 switch_id = return_index;
1432 }
1433 else
1434 {
1435 x = gimple_build_assign (finally_tmp,
1436 build_int_cst (integer_type_node, q->index));
1437 gimple_seq_add_stmt (&mod, x);
1438 do_goto_redirection (q, finally_label, mod, tf);
1439 switch_id = q->index;
1440 }
1441
1442 case_index = j + q->index;
1443 if (VEC_length (tree, case_label_vec) <= case_index
1444 || !VEC_index (tree, case_label_vec, case_index))
1445 {
1446 tree case_lab;
1447 void **slot;
1448 case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
1449 build_int_cst (NULL_TREE, switch_id), NULL,
1450 NULL);
1451 /* We store the cont_stmt in the pointer map, so that we can recover
1452 it in the loop below. We don't create the new label while
1453 walking the goto_queue because pointers don't offer a stable
1454 order. */
1455 if (!cont_map)
1456 cont_map = pointer_map_create ();
1457 slot = pointer_map_insert (cont_map, case_lab);
1458 *slot = q->cont_stmt;
1459 VEC_quick_push (tree, case_label_vec, case_lab);
1460 }
1461 }
1462 for (j = last_case_index; j < last_case_index + nlabels; j++)
1463 {
1464 tree label;
1465 gimple cont_stmt;
1466 void **slot;
1467
1468 last_case = VEC_index (tree, case_label_vec, j);
1469
1470 gcc_assert (last_case);
1471 gcc_assert (cont_map);
1472
1473 slot = pointer_map_contains (cont_map, last_case);
1474 /* As the comment above suggests, CASE_LABEL (last_case) was just a
1475 placeholder, it does not store an actual label, yet. */
1476 gcc_assert (slot);
1477 cont_stmt = *(gimple *) slot;
1478
1479 label = create_artificial_label ();
1480 CASE_LABEL (last_case) = label;
1481
1482 x = gimple_build_label (label);
1483 gimple_seq_add_stmt (&switch_body, x);
1484 gimple_seq_add_stmt (&switch_body, cont_stmt);
1485 maybe_record_in_goto_queue (state, cont_stmt);
1486 }
1487 if (cont_map)
1488 pointer_map_destroy (cont_map);
1489
1490 replace_goto_queue (tf);
1491
1492 /* Make sure that the last case is the default label, as one is required.
1493 Then sort the labels, which is also required in GIMPLE. */
1494 CASE_LOW (last_case) = NULL;
1495 sort_case_labels (case_label_vec);
1496
1497 /* Build the switch statement, setting last_case to be the default
1498 label. */
1499 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
1500 case_label_vec);
1501
1502 /* Need to link SWITCH_STMT after running replace_goto_queue
1503 due to not wanting to process the same goto stmts twice. */
1504 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1505 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1506 }
1507
1508 /* Decide whether or not we are going to duplicate the finally block.
1509 There are several considerations.
1510
1511 First, if this is Java, then the finally block contains code
1512 written by the user. It has line numbers associated with it,
1513 so duplicating the block means it's difficult to set a breakpoint.
1514 Since controlling code generation via -g is verboten, we simply
1515 never duplicate code without optimization.
1516
1517 Second, we'd like to prevent egregious code growth. One way to
1518 do this is to estimate the size of the finally block, multiply
1519 that by the number of copies we'd need to make, and compare against
1520 the estimate of the size of the switch machinery we'd have to add. */
1521
1522 static bool
1523 decide_copy_try_finally (int ndests, gimple_seq finally)
1524 {
1525 int f_estimate, sw_estimate;
1526
1527 if (!optimize)
1528 return false;
1529
1530 /* Finally estimate N times, plus N gotos. */
1531 f_estimate = count_insns_seq (finally, &eni_size_weights);
1532 f_estimate = (f_estimate + 1) * ndests;
1533
1534 /* Switch statement (cost 10), N variable assignments, N gotos. */
1535 sw_estimate = 10 + 2 * ndests;
1536
1537 /* Optimize for size clearly wants our best guess. */
1538 if (optimize_function_for_size_p (cfun))
1539 return f_estimate < sw_estimate;
1540
1541 /* ??? These numbers are completely made up so far. */
1542 if (optimize > 1)
1543 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1544 else
1545 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1546 }
1547
1548
1549 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
1550 to a sequence of labels and blocks, plus the exception region trees
1551 that record all the magic. This is complicated by the need to
1552 arrange for the FINALLY block to be executed on all exits. */
1553
1554 static gimple_seq
1555 lower_try_finally (struct leh_state *state, gimple tp)
1556 {
1557 struct leh_tf_state this_tf;
1558 struct leh_state this_state;
1559 int ndests;
1560
1561 /* Process the try block. */
1562
1563 memset (&this_tf, 0, sizeof (this_tf));
1564 this_tf.try_finally_expr = tp;
1565 this_tf.top_p = tp;
1566 this_tf.outer = state;
1567 if (using_eh_for_cleanups_p)
1568 this_tf.region
1569 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1570 else
1571 this_tf.region = NULL;
1572
1573 this_state.cur_region = this_tf.region;
1574 this_state.prev_try = state->prev_try;
1575 this_state.tf = &this_tf;
1576
1577 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
1578
1579 /* Determine if the try block is escaped through the bottom. */
1580 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1581
1582 /* Determine if any exceptions are possible within the try block. */
1583 if (using_eh_for_cleanups_p)
1584 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1585 if (this_tf.may_throw)
1586 {
1587 this_tf.eh_label = create_artificial_label ();
1588 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1589 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1590 }
1591
1592 /* Determine how many edges (still) reach the finally block. Or rather,
1593 how many destinations are reached by the finally block. Use this to
1594 determine how we process the finally block itself. */
1595
1596 ndests = VEC_length (tree, this_tf.dest_array);
1597 ndests += this_tf.may_fallthru;
1598 ndests += this_tf.may_return;
1599 ndests += this_tf.may_throw;
1600
1601 /* If the FINALLY block is not reachable, dike it out. */
1602 if (ndests == 0)
1603 {
1604 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1605 gimple_try_set_cleanup (tp, NULL);
1606 }
1607 /* If the finally block doesn't fall through, then any destination
1608 we might try to impose there isn't reached either. There may be
1609 some minor amount of cleanup and redirection still needed. */
1610 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1611 lower_try_finally_nofallthru (state, &this_tf);
1612
1613 /* We can easily special-case redirection to a single destination. */
1614 else if (ndests == 1)
1615 lower_try_finally_onedest (state, &this_tf);
1616 else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
1617 lower_try_finally_copy (state, &this_tf);
1618 else
1619 lower_try_finally_switch (state, &this_tf);
1620
1621 /* If someone requested we add a label at the end of the transformed
1622 block, do so. */
1623 if (this_tf.fallthru_label)
1624 {
1625 /* This must be reached only if ndests == 0. */
1626 gimple x = gimple_build_label (this_tf.fallthru_label);
1627 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1628 }
1629
1630 VEC_free (tree, heap, this_tf.dest_array);
1631 if (this_tf.goto_queue)
1632 free (this_tf.goto_queue);
1633 if (this_tf.goto_queue_map)
1634 pointer_map_destroy (this_tf.goto_queue_map);
1635
1636 return this_tf.top_p_seq;
1637 }
1638
1639 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1640 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1641 exception region trees that records all the magic. */
1642
1643 static gimple_seq
1644 lower_catch (struct leh_state *state, gimple tp)
1645 {
1646 struct eh_region *try_region;
1647 struct leh_state this_state;
1648 gimple_stmt_iterator gsi;
1649 tree out_label;
1650
1651 try_region = gen_eh_region_try (state->cur_region);
1652 this_state.cur_region = try_region;
1653 this_state.prev_try = try_region;
1654 this_state.tf = state->tf;
1655
1656 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1657
1658 if (!get_eh_region_may_contain_throw (try_region))
1659 {
1660 return gimple_try_eval (tp);
1661 }
1662
1663 out_label = NULL;
1664 for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
1665 {
1666 struct eh_region *catch_region;
1667 tree eh_label;
1668 gimple x, gcatch;
1669
1670 gcatch = gsi_stmt (gsi);
1671 catch_region = gen_eh_region_catch (try_region,
1672 gimple_catch_types (gcatch));
1673
1674 this_state.cur_region = catch_region;
1675 this_state.prev_try = state->prev_try;
1676 lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch));
1677
1678 eh_label = create_artificial_label ();
1679 set_eh_region_tree_label (catch_region, eh_label);
1680
1681 x = gimple_build_label (eh_label);
1682 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
1683
1684 if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
1685 {
1686 if (!out_label)
1687 out_label = create_artificial_label ();
1688
1689 x = gimple_build_goto (out_label);
1690 gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x);
1691 }
1692
1693 gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch),
1694 GSI_SAME_STMT);
1695 gsi_remove (&gsi, false);
1696 }
1697
1698 return frob_into_branch_around (tp, NULL, out_label);
1699 }
1700
1701 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1702 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1703 region trees that record all the magic. */
1704
1705 static gimple_seq
1706 lower_eh_filter (struct leh_state *state, gimple tp)
1707 {
1708 struct leh_state this_state;
1709 struct eh_region *this_region;
1710 gimple inner;
1711 tree eh_label;
1712
1713 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1714
1715 if (gimple_eh_filter_must_not_throw (inner))
1716 this_region = gen_eh_region_must_not_throw (state->cur_region);
1717 else
1718 this_region = gen_eh_region_allowed (state->cur_region,
1719 gimple_eh_filter_types (inner));
1720 this_state = *state;
1721 this_state.cur_region = this_region;
1722 /* For must not throw regions any cleanup regions inside it
1723 can't reach outer catch regions. */
1724 if (gimple_eh_filter_must_not_throw (inner))
1725 this_state.prev_try = NULL;
1726
1727 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1728
1729 if (!get_eh_region_may_contain_throw (this_region))
1730 {
1731 return gimple_try_eval (tp);
1732 }
1733
1734 lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
1735 gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));
1736
1737 eh_label = create_artificial_label ();
1738 set_eh_region_tree_label (this_region, eh_label);
1739
1740 return frob_into_branch_around (tp, eh_label, NULL);
1741 }
1742
1743 /* Implement a cleanup expression. This is similar to try-finally,
1744 except that we only execute the cleanup block for exception edges. */
1745
1746 static gimple_seq
1747 lower_cleanup (struct leh_state *state, gimple tp)
1748 {
1749 struct leh_state this_state;
1750 struct eh_region *this_region;
1751 struct leh_tf_state fake_tf;
1752 gimple_seq result;
1753
1754 /* If not using eh, then exception-only cleanups are no-ops. */
1755 if (!flag_exceptions)
1756 {
1757 result = gimple_try_eval (tp);
1758 lower_eh_constructs_1 (state, result);
1759 return result;
1760 }
1761
1762 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1763 this_state = *state;
1764 this_state.cur_region = this_region;
1765
1766 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1767
1768 if (!get_eh_region_may_contain_throw (this_region))
1769 {
1770 return gimple_try_eval (tp);
1771 }
1772
1773 /* Build enough of a try-finally state so that we can reuse
1774 honor_protect_cleanup_actions. */
1775 memset (&fake_tf, 0, sizeof (fake_tf));
1776 fake_tf.top_p = tp;
1777 fake_tf.outer = state;
1778 fake_tf.region = this_region;
1779 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1780 fake_tf.may_throw = true;
1781
1782 fake_tf.eh_label = create_artificial_label ();
1783 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1784
1785 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1786
1787 if (fake_tf.may_throw)
1788 {
1789 /* In this case honor_protect_cleanup_actions had nothing to do,
1790 and we should process this normally. */
1791 lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
1792 result = frob_into_branch_around (tp, fake_tf.eh_label,
1793 fake_tf.fallthru_label);
1794 }
1795 else
1796 {
1797 /* In this case honor_protect_cleanup_actions did nearly all of
1798 the work. All we have left is to append the fallthru_label. */
1799
1800 result = gimple_try_eval (tp);
1801 if (fake_tf.fallthru_label)
1802 {
1803 gimple x = gimple_build_label (fake_tf.fallthru_label);
1804 gimple_seq_add_stmt (&result, x);
1805 }
1806 }
1807 return result;
1808 }
1809
1810
1811
1812 /* Main loop for lowering eh constructs. Also moves gsi to the next
1813 statement. */
1814
1815 static void
1816 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1817 {
1818 gimple_seq replace;
1819 gimple x;
1820 gimple stmt = gsi_stmt (*gsi);
1821
1822 switch (gimple_code (stmt))
1823 {
1824 case GIMPLE_CALL:
1825 case GIMPLE_ASSIGN:
1826 /* If the stmt can throw use a new temporary for the assignment
1827 to a LHS. This makes sure the old value of the LHS is
1828 available on the EH edge. */
1829 if (stmt_could_throw_p (stmt)
1830 && gimple_has_lhs (stmt)
1831 && !tree_could_throw_p (gimple_get_lhs (stmt))
1832 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1833 {
1834 tree lhs = gimple_get_lhs (stmt);
1835 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1836 gimple s = gimple_build_assign (lhs, tmp);
1837 gimple_set_location (s, gimple_location (stmt));
1838 gimple_set_block (s, gimple_block (stmt));
1839 gimple_set_lhs (stmt, tmp);
1840 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1841 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1842 DECL_GIMPLE_REG_P (tmp) = 1;
1843 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1844 }
1845 /* Look for things that can throw exceptions, and record them. */
1846 if (state->cur_region && stmt_could_throw_p (stmt))
1847 {
1848 record_stmt_eh_region (state->cur_region, stmt);
1849 note_eh_region_may_contain_throw (state->cur_region);
1850 }
1851 break;
1852
1853 case GIMPLE_COND:
1854 case GIMPLE_GOTO:
1855 case GIMPLE_RETURN:
1856 maybe_record_in_goto_queue (state, stmt);
1857 break;
1858
1859 case GIMPLE_SWITCH:
1860 verify_norecord_switch_expr (state, stmt);
1861 break;
1862
1863 case GIMPLE_TRY:
1864 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1865 replace = lower_try_finally (state, stmt);
1866 else
1867 {
1868 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
1869 switch (gimple_code (x))
1870 {
1871 case GIMPLE_CATCH:
1872 replace = lower_catch (state, stmt);
1873 break;
1874 case GIMPLE_EH_FILTER:
1875 replace = lower_eh_filter (state, stmt);
1876 break;
1877 default:
1878 replace = lower_cleanup (state, stmt);
1879 break;
1880 }
1881 }
1882
1883 /* Remove the old stmt and insert the transformed sequence
1884 instead. */
1885 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
1886 gsi_remove (gsi, true);
1887
1888 /* Return since we don't want gsi_next () */
1889 return;
1890
1891 default:
1892 /* A type, a decl, or some kind of statement that we're not
1893 interested in. Don't walk them. */
1894 break;
1895 }
1896
1897 gsi_next (gsi);
1898 }
1899
1900 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
1901
1902 static void
1903 lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
1904 {
1905 gimple_stmt_iterator gsi;
1906 for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
1907 lower_eh_constructs_2 (state, &gsi);
1908 }
1909
1910 static unsigned int
1911 lower_eh_constructs (void)
1912 {
1913 struct leh_state null_state;
1914
1915 gimple_seq bodyp = gimple_body (current_function_decl);
1916
1917 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1918
1919 collect_finally_tree_1 (bodyp, NULL);
1920
1921 memset (&null_state, 0, sizeof (null_state));
1922 lower_eh_constructs_1 (&null_state, bodyp);
1923
1924 htab_delete (finally_tree);
1925
1926 collect_eh_region_array ();
1927 return 0;
1928 }
1929
1930 struct gimple_opt_pass pass_lower_eh =
1931 {
1932 {
1933 GIMPLE_PASS,
1934 "eh", /* name */
1935 NULL, /* gate */
1936 lower_eh_constructs, /* execute */
1937 NULL, /* sub */
1938 NULL, /* next */
1939 0, /* static_pass_number */
1940 TV_TREE_EH, /* tv_id */
1941 PROP_gimple_lcf, /* properties_required */
1942 PROP_gimple_leh, /* properties_provided */
1943 0, /* properties_destroyed */
1944 0, /* todo_flags_start */
1945 TODO_dump_func /* todo_flags_finish */
1946 }
1947 };
1948
1949 \f
1950 /* Construct EH edges for STMT. */
1951
1952 static void
1953 make_eh_edge (struct eh_region *region, void *data)
1954 {
1955 gimple stmt;
1956 tree lab;
1957 basic_block src, dst;
1958
1959 stmt = (gimple) data;
1960 lab = get_eh_region_tree_label (region);
1961
1962 src = gimple_bb (stmt);
1963 dst = label_to_block (lab);
1964
1965 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
1966 }
1967
1968 /* See if STMT is call that might be inlined. */
1969
1970 static bool
1971 inlinable_call_p (gimple stmt)
1972 {
1973 tree decl;
1974 if (gimple_code (stmt) != GIMPLE_CALL)
1975 return false;
1976 if (cfun->after_inlining)
1977 return false;
1978 /* Indirect calls can be propagated to direct call
1979 and inlined. */
1980 decl = gimple_call_fndecl (stmt);
1981 if (!decl)
1982 return true;
1983 if (cgraph_function_flags_ready
1984 && cgraph_function_body_availability (cgraph_node (decl))
1985 < AVAIL_OVERWRITABLE)
1986 return false;
1987 return !DECL_UNINLINABLE (decl);
1988 }
1989
1990 void
1991 make_eh_edges (gimple stmt)
1992 {
1993 int region_nr;
1994 bool is_resx;
1995 bool inlinable = false;
1996 basic_block bb;
1997
1998 if (gimple_code (stmt) == GIMPLE_RESX)
1999 {
2000 region_nr = gimple_resx_region (stmt);
2001 is_resx = true;
2002 }
2003 else
2004 {
2005 region_nr = lookup_stmt_eh_region (stmt);
2006 if (region_nr < 0)
2007 return;
2008 is_resx = false;
2009 inlinable = inlinable_call_p (stmt);
2010 }
2011
2012 foreach_reachable_handler (region_nr, is_resx, inlinable, make_eh_edge, stmt);
2013
2014 /* Make CFG profile more consistent assuming that exception will resume to first
2015 available EH handler. In practice this makes little difference, but we get
2016 fewer consistency errors in the dumps. */
2017 bb = gimple_bb (stmt);
2018 if (is_resx && EDGE_COUNT (bb->succs))
2019 EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
2020 }
2021
2022 static bool mark_eh_edge_found_error;
2023
2024 /* Mark edge make_eh_edge would create for given region by setting it aux
2025 field, output error if something goes wrong. */
2026
2027 static void
2028 mark_eh_edge (struct eh_region *region, void *data)
2029 {
2030 gimple stmt;
2031 tree lab;
2032 basic_block src, dst;
2033 edge e;
2034
2035 stmt = (gimple) data;
2036 lab = get_eh_region_tree_label (region);
2037
2038 src = gimple_bb (stmt);
2039 dst = label_to_block (lab);
2040
2041 e = find_edge (src, dst);
2042 if (!e)
2043 {
2044 error ("EH edge %i->%i is missing", src->index, dst->index);
2045 mark_eh_edge_found_error = true;
2046 }
2047 else if (!(e->flags & EDGE_EH))
2048 {
2049 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
2050 mark_eh_edge_found_error = true;
2051 }
2052 else if (e->aux)
2053 {
2054 /* ??? might not be mistake. */
2055 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
2056 mark_eh_edge_found_error = true;
2057 }
2058 else
2059 e->aux = (void *)1;
2060 }
2061
2062 /* Verify that BB containing STMT as the last statement, has precisely the
2063 edges that make_eh_edges would create. */
2064
2065 bool
2066 verify_eh_edges (gimple stmt)
2067 {
2068 int region_nr;
2069 bool is_resx;
2070 basic_block bb = gimple_bb (stmt);
2071 edge_iterator ei;
2072 edge e;
2073 bool inlinable = false;
2074
2075 FOR_EACH_EDGE (e, ei, bb->succs)
2076 gcc_assert (!e->aux);
2077 mark_eh_edge_found_error = false;
2078 if (gimple_code (stmt) == GIMPLE_RESX)
2079 {
2080 region_nr = gimple_resx_region (stmt);
2081 is_resx = true;
2082 }
2083 else
2084 {
2085 region_nr = lookup_stmt_eh_region (stmt);
2086 if (region_nr < 0)
2087 {
2088 FOR_EACH_EDGE (e, ei, bb->succs)
2089 if (e->flags & EDGE_EH)
2090 {
2091 error ("BB %i can not throw but has EH edges", bb->index);
2092 return true;
2093 }
2094 return false;
2095 }
2096 if (!stmt_could_throw_p (stmt))
2097 {
2098 error ("BB %i last statement has incorrectly set region", bb->index);
2099 return true;
2100 }
2101 inlinable = inlinable_call_p (stmt);
2102 is_resx = false;
2103 }
2104
2105 foreach_reachable_handler (region_nr, is_resx, inlinable, mark_eh_edge, stmt);
2106 FOR_EACH_EDGE (e, ei, bb->succs)
2107 {
2108 if ((e->flags & EDGE_EH) && !e->aux)
2109 {
2110 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
2111 mark_eh_edge_found_error = true;
2112 return true;
2113 }
2114 e->aux = NULL;
2115 }
2116
2117 return mark_eh_edge_found_error;
2118 }
2119
2120 \f
2121 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2122
2123 bool
2124 operation_could_trap_helper_p (enum tree_code op,
2125 bool fp_operation,
2126 bool honor_trapv,
2127 bool honor_nans,
2128 bool honor_snans,
2129 tree divisor,
2130 bool *handled)
2131 {
2132 *handled = true;
2133 switch (op)
2134 {
2135 case TRUNC_DIV_EXPR:
2136 case CEIL_DIV_EXPR:
2137 case FLOOR_DIV_EXPR:
2138 case ROUND_DIV_EXPR:
2139 case EXACT_DIV_EXPR:
2140 case CEIL_MOD_EXPR:
2141 case FLOOR_MOD_EXPR:
2142 case ROUND_MOD_EXPR:
2143 case TRUNC_MOD_EXPR:
2144 case RDIV_EXPR:
2145 if (honor_snans || honor_trapv)
2146 return true;
2147 if (fp_operation)
2148 return flag_trapping_math;
2149 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2150 return true;
2151 return false;
2152
2153 case LT_EXPR:
2154 case LE_EXPR:
2155 case GT_EXPR:
2156 case GE_EXPR:
2157 case LTGT_EXPR:
2158 /* Some floating point comparisons may trap. */
2159 return honor_nans;
2160
2161 case EQ_EXPR:
2162 case NE_EXPR:
2163 case UNORDERED_EXPR:
2164 case ORDERED_EXPR:
2165 case UNLT_EXPR:
2166 case UNLE_EXPR:
2167 case UNGT_EXPR:
2168 case UNGE_EXPR:
2169 case UNEQ_EXPR:
2170 return honor_snans;
2171
2172 case CONVERT_EXPR:
2173 case FIX_TRUNC_EXPR:
2174 /* Conversion of floating point might trap. */
2175 return honor_nans;
2176
2177 case NEGATE_EXPR:
2178 case ABS_EXPR:
2179 case CONJ_EXPR:
2180 /* These operations don't trap with floating point. */
2181 if (honor_trapv)
2182 return true;
2183 return false;
2184
2185 case PLUS_EXPR:
2186 case MINUS_EXPR:
2187 case MULT_EXPR:
2188 /* Any floating arithmetic may trap. */
2189 if (fp_operation && flag_trapping_math)
2190 return true;
2191 if (honor_trapv)
2192 return true;
2193 return false;
2194
2195 default:
2196 /* Any floating arithmetic may trap. */
2197 if (fp_operation && flag_trapping_math)
2198 return true;
2199
2200 *handled = false;
2201 return false;
2202 }
2203 }
2204
2205 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2206 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2207 type operands that may trap. If OP is a division operator, DIVISOR contains
2208 the value of the divisor. */
2209
2210 bool
2211 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2212 tree divisor)
2213 {
2214 bool honor_nans = (fp_operation && flag_trapping_math
2215 && !flag_finite_math_only);
2216 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2217 bool handled;
2218
2219 if (TREE_CODE_CLASS (op) != tcc_comparison
2220 && TREE_CODE_CLASS (op) != tcc_unary
2221 && TREE_CODE_CLASS (op) != tcc_binary)
2222 return false;
2223
2224 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2225 honor_nans, honor_snans, divisor,
2226 &handled);
2227 }
2228
2229 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2230 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2231 This routine expects only GIMPLE lhs or rhs input. */
2232
2233 bool
2234 tree_could_trap_p (tree expr)
2235 {
2236 enum tree_code code;
2237 bool fp_operation = false;
2238 bool honor_trapv = false;
2239 tree t, base, div = NULL_TREE;
2240
2241 if (!expr)
2242 return false;
2243
2244 code = TREE_CODE (expr);
2245 t = TREE_TYPE (expr);
2246
2247 if (t)
2248 {
2249 if (COMPARISON_CLASS_P (expr))
2250 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2251 else
2252 fp_operation = FLOAT_TYPE_P (t);
2253 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2254 }
2255
2256 if (TREE_CODE_CLASS (code) == tcc_binary)
2257 div = TREE_OPERAND (expr, 1);
2258 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2259 return true;
2260
2261 restart:
2262 switch (code)
2263 {
2264 case TARGET_MEM_REF:
2265 /* For TARGET_MEM_REFs use the information based on the original
2266 reference. */
2267 expr = TMR_ORIGINAL (expr);
2268 code = TREE_CODE (expr);
2269 goto restart;
2270
2271 case COMPONENT_REF:
2272 case REALPART_EXPR:
2273 case IMAGPART_EXPR:
2274 case BIT_FIELD_REF:
2275 case VIEW_CONVERT_EXPR:
2276 case WITH_SIZE_EXPR:
2277 expr = TREE_OPERAND (expr, 0);
2278 code = TREE_CODE (expr);
2279 goto restart;
2280
2281 case ARRAY_RANGE_REF:
2282 base = TREE_OPERAND (expr, 0);
2283 if (tree_could_trap_p (base))
2284 return true;
2285
2286 if (TREE_THIS_NOTRAP (expr))
2287 return false;
2288
2289 return !range_in_array_bounds_p (expr);
2290
2291 case ARRAY_REF:
2292 base = TREE_OPERAND (expr, 0);
2293 if (tree_could_trap_p (base))
2294 return true;
2295
2296 if (TREE_THIS_NOTRAP (expr))
2297 return false;
2298
2299 return !in_array_bounds_p (expr);
2300
2301 case INDIRECT_REF:
2302 case ALIGN_INDIRECT_REF:
2303 case MISALIGNED_INDIRECT_REF:
2304 return !TREE_THIS_NOTRAP (expr);
2305
2306 case ASM_EXPR:
2307 return TREE_THIS_VOLATILE (expr);
2308
2309
2310 case CALL_EXPR:
2311 t = get_callee_fndecl (expr);
2312 /* Assume that calls to weak functions may trap. */
2313 if (!t || !DECL_P (t) || DECL_WEAK (t))
2314 return true;
2315 return false;
2316
2317 default:
2318 return false;
2319 }
2320 }
2321
2322
2323 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be a
2324 an assignment or a conditional) may throw. */
2325
2326 static bool
2327 stmt_could_throw_1_p (gimple stmt)
2328 {
2329 enum tree_code code = gimple_expr_code (stmt);
2330 bool honor_nans = false;
2331 bool honor_snans = false;
2332 bool fp_operation = false;
2333 bool honor_trapv = false;
2334 tree t;
2335 size_t i;
2336 bool handled, ret;
2337
2338 if (TREE_CODE_CLASS (code) == tcc_comparison
2339 || TREE_CODE_CLASS (code) == tcc_unary
2340 || TREE_CODE_CLASS (code) == tcc_binary)
2341 {
2342 t = gimple_expr_type (stmt);
2343 fp_operation = FLOAT_TYPE_P (t);
2344 if (fp_operation)
2345 {
2346 honor_nans = flag_trapping_math && !flag_finite_math_only;
2347 honor_snans = flag_signaling_nans != 0;
2348 }
2349 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2350 honor_trapv = true;
2351 }
2352
2353 /* Check if the main expression may trap. */
2354 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2355 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2356 honor_nans, honor_snans, t,
2357 &handled);
2358 if (handled)
2359 return ret;
2360
2361 /* If the expression does not trap, see if any of the individual operands may
2362 trap. */
2363 for (i = 0; i < gimple_num_ops (stmt); i++)
2364 if (tree_could_trap_p (gimple_op (stmt, i)))
2365 return true;
2366
2367 return false;
2368 }
2369
2370
2371 /* Return true if statement STMT could throw an exception. */
2372
2373 bool
2374 stmt_could_throw_p (gimple stmt)
2375 {
2376 enum gimple_code code;
2377
2378 if (!flag_exceptions)
2379 return false;
2380
2381 /* The only statements that can throw an exception are assignments,
2382 conditionals, calls and asms. */
2383 code = gimple_code (stmt);
2384 if (code != GIMPLE_ASSIGN
2385 && code != GIMPLE_COND
2386 && code != GIMPLE_CALL
2387 && code != GIMPLE_ASM)
2388 return false;
2389
2390 /* If exceptions can only be thrown by function calls and STMT is not a
2391 GIMPLE_CALL, the statement cannot throw. */
2392 if (!flag_non_call_exceptions && code != GIMPLE_CALL)
2393 return false;
2394
2395 if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
2396 return stmt_could_throw_1_p (stmt);
2397 else if (is_gimple_call (stmt))
2398 return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
2399 else if (gimple_code (stmt) == GIMPLE_ASM)
2400 return (gimple_asm_volatile_p (stmt));
2401 else
2402 gcc_unreachable ();
2403
2404 return false;
2405 }
2406
2407
2408 /* Return true if expression T could throw an exception. */
2409
2410 bool
2411 tree_could_throw_p (tree t)
2412 {
2413 if (!flag_exceptions)
2414 return false;
2415 if (TREE_CODE (t) == MODIFY_EXPR)
2416 {
2417 if (flag_non_call_exceptions
2418 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2419 return true;
2420 t = TREE_OPERAND (t, 1);
2421 }
2422
2423 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2424 t = TREE_OPERAND (t, 0);
2425 if (TREE_CODE (t) == CALL_EXPR)
2426 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2427 if (flag_non_call_exceptions)
2428 return tree_could_trap_p (t);
2429 return false;
2430 }
2431
2432 /* Return true if STMT can throw an exception that is not caught within
2433 the current function (CFUN). */
2434
2435 bool
2436 stmt_can_throw_external (gimple stmt)
2437 {
2438 int region_nr;
2439 bool is_resx = false;
2440 bool inlinable_call = false;
2441
2442 if (!stmt_could_throw_p (stmt))
2443 return false;
2444
2445 if (gimple_code (stmt) == GIMPLE_RESX)
2446 {
2447 region_nr = gimple_resx_region (stmt);
2448 is_resx = true;
2449 }
2450 else
2451 region_nr = lookup_stmt_eh_region (stmt);
2452
2453 if (region_nr < 0)
2454 return true;
2455
2456 return can_throw_external_1 (region_nr, is_resx, inlinable_call);
2457 }
2458
2459 /* Return true if STMT can throw an exception that is caught within
2460 the current function (CFUN). */
2461
2462 bool
2463 stmt_can_throw_internal (gimple stmt)
2464 {
2465 int region_nr;
2466 bool is_resx = false;
2467 bool inlinable_call = false;
2468
2469 if (gimple_code (stmt) == GIMPLE_RESX)
2470 {
2471 region_nr = gimple_resx_region (stmt);
2472 is_resx = true;
2473 }
2474 else
2475 {
2476 region_nr = lookup_stmt_eh_region (stmt);
2477 inlinable_call = inlinable_call_p (stmt);
2478 }
2479
2480 if (region_nr < 0)
2481 return false;
2482
2483 return can_throw_internal_1 (region_nr, is_resx, inlinable_call);
2484 }
2485
2486
2487 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2488 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2489 in the table if it should be in there. Return TRUE if a replacement was
2490 done that my require an EH edge purge. */
2491
2492 bool
2493 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2494 {
2495 int region_nr = lookup_stmt_eh_region (old_stmt);
2496
2497 if (region_nr >= 0)
2498 {
2499 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2500
2501 if (new_stmt == old_stmt && new_stmt_could_throw)
2502 return false;
2503
2504 remove_stmt_from_eh_region (old_stmt);
2505 if (new_stmt_could_throw)
2506 {
2507 add_stmt_to_eh_region (new_stmt, region_nr);
2508 return false;
2509 }
2510 else
2511 return true;
2512 }
2513
2514 return false;
2515 }
2516 \f
2517 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2518 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2519 this only handles handlers consisting of a single call, as that's the
2520 important case for C++: a destructor call for a particular object showing
2521 up in multiple handlers. */
2522
2523 static bool
2524 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2525 {
2526 gimple_stmt_iterator gsi;
2527 gimple ones, twos;
2528 unsigned int ai;
2529
2530 gsi = gsi_start (oneh);
2531 if (!gsi_one_before_end_p (gsi))
2532 return false;
2533 ones = gsi_stmt (gsi);
2534
2535 gsi = gsi_start (twoh);
2536 if (!gsi_one_before_end_p (gsi))
2537 return false;
2538 twos = gsi_stmt (gsi);
2539
2540 if (!is_gimple_call (ones)
2541 || !is_gimple_call (twos)
2542 || gimple_call_lhs (ones)
2543 || gimple_call_lhs (twos)
2544 || gimple_call_chain (ones)
2545 || gimple_call_chain (twos)
2546 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2547 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2548 return false;
2549
2550 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2551 if (!operand_equal_p (gimple_call_arg (ones, ai),
2552 gimple_call_arg (twos, ai), 0))
2553 return false;
2554
2555 return true;
2556 }
2557
2558 /* Optimize
2559 try { A() } finally { try { ~B() } catch { ~A() } }
2560 try { ... } finally { ~A() }
2561 into
2562 try { A() } catch { ~B() }
2563 try { ~B() ... } finally { ~A() }
2564
2565 This occurs frequently in C++, where A is a local variable and B is a
2566 temporary used in the initializer for A. */
2567
2568 static void
2569 optimize_double_finally (gimple one, gimple two)
2570 {
2571 gimple oneh;
2572 gimple_stmt_iterator gsi;
2573
2574 gsi = gsi_start (gimple_try_cleanup (one));
2575 if (!gsi_one_before_end_p (gsi))
2576 return;
2577
2578 oneh = gsi_stmt (gsi);
2579 if (gimple_code (oneh) != GIMPLE_TRY
2580 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2581 return;
2582
2583 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2584 {
2585 gimple_seq seq = gimple_try_eval (oneh);
2586
2587 gimple_try_set_cleanup (one, seq);
2588 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2589 seq = copy_gimple_seq_and_replace_locals (seq);
2590 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2591 gimple_try_set_eval (two, seq);
2592 }
2593 }
2594
2595 /* Perform EH refactoring optimizations that are simpler to do when code
2596 flow has been lowered but EH structures haven't. */
2597
2598 static void
2599 refactor_eh_r (gimple_seq seq)
2600 {
2601 gimple_stmt_iterator gsi;
2602 gimple one, two;
2603
2604 one = NULL;
2605 two = NULL;
2606 gsi = gsi_start (seq);
2607 while (1)
2608 {
2609 one = two;
2610 if (gsi_end_p (gsi))
2611 two = NULL;
2612 else
2613 two = gsi_stmt (gsi);
2614 if (one
2615 && two
2616 && gimple_code (one) == GIMPLE_TRY
2617 && gimple_code (two) == GIMPLE_TRY
2618 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2619 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2620 optimize_double_finally (one, two);
2621 if (one)
2622 switch (gimple_code (one))
2623 {
2624 case GIMPLE_TRY:
2625 refactor_eh_r (gimple_try_eval (one));
2626 refactor_eh_r (gimple_try_cleanup (one));
2627 break;
2628 case GIMPLE_CATCH:
2629 refactor_eh_r (gimple_catch_handler (one));
2630 break;
2631 case GIMPLE_EH_FILTER:
2632 refactor_eh_r (gimple_eh_filter_failure (one));
2633 break;
2634 default:
2635 break;
2636 }
2637 if (two)
2638 gsi_next (&gsi);
2639 else
2640 break;
2641 }
2642 }
2643
2644 static unsigned
2645 refactor_eh (void)
2646 {
2647 refactor_eh_r (gimple_body (current_function_decl));
2648 return 0;
2649 }
2650
2651 struct gimple_opt_pass pass_refactor_eh =
2652 {
2653 {
2654 GIMPLE_PASS,
2655 "ehopt", /* name */
2656 NULL, /* gate */
2657 refactor_eh, /* execute */
2658 NULL, /* sub */
2659 NULL, /* next */
2660 0, /* static_pass_number */
2661 TV_TREE_EH, /* tv_id */
2662 PROP_gimple_lcf, /* properties_required */
2663 0, /* properties_provided */
2664 0, /* properties_destroyed */
2665 0, /* todo_flags_start */
2666 TODO_dump_func /* todo_flags_finish */
2667 }
2668 };
2669
2670 /* Walk statements, see what regions are really references and remove unreachable ones. */
2671
2672 static void
2673 tree_remove_unreachable_handlers (void)
2674 {
2675 sbitmap reachable, contains_stmt;
2676 VEC(int,heap) * label_to_region;
2677 basic_block bb;
2678
2679 label_to_region = label_to_region_map ();
2680 reachable = sbitmap_alloc (num_eh_regions ());
2681 sbitmap_zero (reachable);
2682 contains_stmt = sbitmap_alloc (num_eh_regions ());
2683 sbitmap_zero (contains_stmt);
2684
2685 FOR_EACH_BB (bb)
2686 {
2687 gimple_stmt_iterator gsi;
2688 int region;
2689 bool has_eh_preds = bb_has_eh_pred (bb);
2690
2691 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2692 {
2693 gimple stmt = gsi_stmt (gsi);
2694
2695 if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
2696 {
2697 int uid = LABEL_DECL_UID (gimple_label_label (stmt));
2698 int region = VEC_index (int, label_to_region, uid);
2699 SET_BIT (reachable, region);
2700 }
2701 if (gimple_code (stmt) == RESX)
2702 SET_BIT (reachable, gimple_resx_region (stmt));
2703 if ((region = lookup_stmt_eh_region (stmt)) >= 0)
2704 SET_BIT (contains_stmt, region);
2705 }
2706 }
2707
2708 if (dump_file)
2709 {
2710 fprintf (dump_file, "Before removal of unreachable regions:\n");
2711 dump_eh_tree (dump_file, cfun);
2712 fprintf (dump_file, "Reachable regions: ");
2713 dump_sbitmap_file (dump_file, reachable);
2714 fprintf (dump_file, "Regions containing insns: ");
2715 dump_sbitmap_file (dump_file, contains_stmt);
2716 }
2717
2718 remove_unreachable_regions (reachable, contains_stmt);
2719 sbitmap_free (reachable);
2720 sbitmap_free (contains_stmt);
2721 VEC_free (int, heap, label_to_region);
2722 if (dump_file)
2723 {
2724 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
2725 dump_eh_tree (dump_file, cfun);
2726 fprintf (dump_file, "\n\n");
2727 }
2728 }
2729
2730 /* Pattern match emtpy EH receiver looking like:
2731
2732 save_filt.6352_662 = [filter_expr] <<<filter object>>>;
2733 save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
2734 <<<exception object>>> = save_eptr.6351_663;
2735 <<<filter object>>> = save_filt.6352_662;
2736 resx 1
2737 */
2738
2739 static int
2740 tree_empty_eh_handler_p (basic_block bb)
2741 {
2742 gimple_stmt_iterator gsi;
2743 int region;
2744
2745 gsi = gsi_last_bb (bb);
2746
2747 /* RESX */
2748 if (gsi_end_p (gsi))
2749 return 0;
2750 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
2751 return 0;
2752 region = gimple_resx_region (gsi_stmt (gsi));
2753
2754 /* filter_object set. */
2755 gsi_prev (&gsi);
2756 if (gsi_end_p (gsi))
2757 return 0;
2758 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2759 return 0;
2760 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
2761 return 0;
2762
2763 /* filter_object set. */
2764 gsi_prev (&gsi);
2765 if (gsi_end_p (gsi))
2766 return 0;
2767 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2768 return 0;
2769 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2770 return 0;
2771
2772 /* filter_object get. */
2773 gsi_prev (&gsi);
2774 if (gsi_end_p (gsi))
2775 return 0;
2776 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2777 return 0;
2778 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2779 return 0;
2780
2781 /* filter_object get. */
2782 gsi_prev (&gsi);
2783 if (gsi_end_p (gsi))
2784 return 0;
2785 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2786 return 0;
2787 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
2788 return 0;
2789
2790 /* label. */
2791 gsi_prev (&gsi);
2792 if (gsi_end_p (gsi))
2793 return 0;
2794 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
2795 return region;
2796 else
2797 return 0;
2798 }
2799
2800 static bool dominance_info_invalidated;
2801
2802 /* Look for basic blocks containing empty exception handler and remove them.
2803 This is similar to jump forwarding, just across EH edges. */
2804
2805 static bool
2806 cleanup_empty_eh (basic_block bb)
2807 {
2808 int region;
2809
2810 /* When handler of EH region winds up to be empty, we can safely
2811 remove it. This leads to inner EH regions to be redirected
2812 to outer one, if present in function. So we need to rebuild
2813 EH edges in all sources. */
2814 if ((region = tree_empty_eh_handler_p (bb)))
2815 {
2816 edge_iterator ei;
2817 edge e;
2818 gimple_stmt_iterator si;
2819
2820 remove_eh_region (region);
2821
2822 /* It is safe to mark symbol for renaming because we have abnormal PHI
2823 here. Once EH edges are made redirectable we might need to add here
2824 similar updating as jump threading does. */
2825
2826 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2827 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (gsi_stmt (si))));
2828
2829 while ((e = ei_safe_edge (ei_start (bb->preds))))
2830 {
2831 basic_block src = e->src;
2832 gcc_assert (e->flags & EDGE_EH);
2833 for (ei = ei_start (src->succs); (e = ei_safe_edge (ei));)
2834 {
2835 if (e->flags & EDGE_EH)
2836 {
2837 remove_edge (e);
2838 dominance_info_invalidated = true;
2839 }
2840 else
2841 ei_next (&ei);
2842 }
2843 if (!stmt_can_throw_internal (last_stmt (src)))
2844 continue;
2845 make_eh_edges (last_stmt (src));
2846 FOR_EACH_EDGE (e, ei, src->succs)
2847 if (e->flags & EDGE_EH)
2848 {
2849 dominance_info_invalidated = true;
2850 for (si = gsi_start_phis (e->dest); !gsi_end_p (si);
2851 gsi_next (&si))
2852 mark_sym_for_renaming (SSA_NAME_VAR
2853 (PHI_RESULT (gsi_stmt (si))));
2854 }
2855 }
2856 if (dump_file)
2857 fprintf (dump_file, "Empty EH handler %i removed\n", region);
2858 delete_basic_block (bb);
2859 return true;
2860 }
2861 return false;
2862 }
2863
2864
2865 /* Perform cleanups and lowering of exception handling
2866 1) cleanups regions with handlers doing nothing are optimized out
2867 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
2868 3) Info about regions that are containing instructions, and regions
2869 reachable via local EH edges is collected
2870 4) Eh tree is pruned for regions no longer neccesary.
2871 */
2872
2873 static unsigned int
2874 cleanup_eh (void)
2875 {
2876 bool changed = false;
2877 basic_block bb;
2878 int i;
2879
2880 if (!cfun->eh)
2881 return 0;
2882 if (dump_file)
2883 {
2884 fprintf (dump_file, "Before cleanups:\n");
2885 dump_eh_tree (dump_file, cfun);
2886 }
2887
2888 dominance_info_invalidated = false;
2889 /* We cannot use FOR_EACH_BB, since the basic blocks may get removed. */
2890 for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
2891 {
2892 bb = BASIC_BLOCK (i);
2893 if (bb)
2894 changed |= cleanup_empty_eh (bb);
2895 }
2896 if (dominance_info_invalidated)
2897 {
2898 free_dominance_info (CDI_DOMINATORS);
2899 free_dominance_info (CDI_POST_DOMINATORS);
2900 }
2901
2902 /* Removing contained cleanup can render MUST_NOT_THROW regions empty. */
2903 if (changed)
2904 delete_unreachable_blocks ();
2905
2906 tree_remove_unreachable_handlers ();
2907 if (dump_file)
2908 {
2909 fprintf (dump_file, "After cleanups:\n");
2910 dump_eh_tree (dump_file, cfun);
2911 }
2912
2913 return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0);
2914 }
2915
2916 struct gimple_opt_pass pass_cleanup_eh = {
2917 {
2918 GIMPLE_PASS,
2919 "ehcleanup", /* name */
2920 NULL, /* gate */
2921 cleanup_eh, /* execute */
2922 NULL, /* sub */
2923 NULL, /* next */
2924 0, /* static_pass_number */
2925 TV_TREE_EH, /* tv_id */
2926 PROP_gimple_lcf, /* properties_required */
2927 0, /* properties_provided */
2928 0, /* properties_destroyed */
2929 0, /* todo_flags_start */
2930 TODO_dump_func /* todo_flags_finish */
2931 }
2932 };