[gcc.git] gcc/tree-cfg.c, as of commit "re PR c++/23167 (internal compiler error: in create_tmp_var)"
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
48
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
51
52 /* Local declarations. */
53
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
56
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
61
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
64
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
69
70 struct edge_to_cases_elt
71 {
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
74
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
80 };
81
82 static htab_t edge_to_cases;
83
84 /* CFG statistics. */
85 struct cfg_stats_d
86 {
87 long num_merged_labels;
88 };
89
90 static struct cfg_stats_d cfg_stats;
91
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
94
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
99
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
110
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
116
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
125
126 void
127 init_empty_tree_cfg (void)
128 {
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = 0;
133 last_basic_block = 0;
134 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
135
136 /* Build a mapping of labels to their associated blocks. */
137 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
138 "label to block map");
139
140 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
141 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
142 }
143
144 /*---------------------------------------------------------------------------
145 Create basic blocks
146 ---------------------------------------------------------------------------*/
147
148 /* Entry point to the CFG builder for trees. TP points to the list of
149 statements to be added to the flowgraph. */
150
151 static void
152 build_tree_cfg (tree *tp)
153 {
154 /* Register specific tree functions. */
155 tree_register_cfg_hooks ();
156
157 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
158
159 init_empty_tree_cfg ();
160
161 found_computed_goto = 0;
162 make_blocks (*tp);
163
164 /* Computed gotos are hell to deal with, especially if there are
165 lots of them with a large number of destinations. So we factor
166 them to a common computed goto location before we build the
167 edge list. After we convert back to normal form, we will un-factor
168 the computed gotos since factoring introduces an unwanted jump. */
169 if (found_computed_goto)
170 factor_computed_gotos ();
171
172 /* Make sure there is always at least one block, even if it's empty. */
173 if (n_basic_blocks == 0)
174 create_empty_bb (ENTRY_BLOCK_PTR);
175
176 /* Adjust the size of the array. */
177 VARRAY_GROW (basic_block_info, n_basic_blocks);
178
179 /* To speed up statement iterator walks, we first purge dead labels. */
180 cleanup_dead_labels ();
181
182 /* Group case nodes to reduce the number of edges.
183 We do this after cleaning up dead labels because otherwise we miss
184 a lot of obvious case merging opportunities. */
185 group_case_labels ();
186
187 /* Create the edges of the flowgraph. */
188 make_edges ();
189
190 /* Debugging dumps. */
191
192 /* Write the flowgraph to a VCG file. */
193 {
194 int local_dump_flags;
195 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
196 if (dump_file)
197 {
198 tree_cfg2vcg (dump_file);
199 dump_end (TDI_vcg, dump_file);
200 }
201 }
202
203 #ifdef ENABLE_CHECKING
204 verify_stmts ();
205 #endif
206
207 /* Dump a textual representation of the flowgraph. */
208 if (dump_file)
209 dump_tree_cfg (dump_file, dump_flags);
210 }
211
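/* Main entry point for the CFG construction pass: build the flowgraph
   for the body of the current function.  */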
212 static void
213 execute_build_cfg (void)
214 {
215 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
216 }
217
218 struct tree_opt_pass pass_build_cfg =
219 {
220 "cfg", /* name */
221 NULL, /* gate */
222 execute_build_cfg, /* execute */
223 NULL, /* sub */
224 NULL, /* next */
225 0, /* static_pass_number */
226 TV_TREE_CFG, /* tv_id */
227 PROP_gimple_leh, /* properties_required */
228 PROP_cfg, /* properties_provided */
229 0, /* properties_destroyed */
230 0, /* todo_flags_start */
231 TODO_verify_stmts, /* todo_flags_finish */
232 0 /* letter */
233 };
234
235 /* Search the CFG for any computed gotos. If found, factor them to a
236 common computed goto site. Also record the location of that site so
237 that we can un-factor the gotos after we have converted back to
238 normal form. */
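
/* As a rough sketch (the names are the artificial ones created by this
   function), each original computed goto

       goto *p;

   is rewritten as

       gotovar = p;
       goto <factored>;

   and a single new block

       <factored>:  goto *gotovar;

   is created, so that only one block in the CFG ends up with an
   arbitrarily large number of successors.  */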
239
240 static void
241 factor_computed_gotos (void)
242 {
243 basic_block bb;
244 tree factored_label_decl = NULL;
245 tree var = NULL;
246 tree factored_computed_goto_label = NULL;
247 tree factored_computed_goto = NULL;
248
249 /* We know there are one or more computed gotos in this function.
250 Examine the last statement in each basic block to see if the block
251 ends with a computed goto. */
252
253 FOR_EACH_BB (bb)
254 {
255 block_stmt_iterator bsi = bsi_last (bb);
256 tree last;
257
258 if (bsi_end_p (bsi))
259 continue;
260 last = bsi_stmt (bsi);
261
262 /* Ignore the computed goto we create when we factor the original
263 computed gotos. */
264 if (last == factored_computed_goto)
265 continue;
266
267 /* If the last statement is a computed goto, factor it. */
268 if (computed_goto_p (last))
269 {
270 tree assignment;
271
272 /* The first time we find a computed goto we need to create
273 the factored goto block and the variable each original
274        computed goto will use for its goto destination.  */
275 if (! factored_computed_goto)
276 {
277 basic_block new_bb = create_empty_bb (bb);
278 block_stmt_iterator new_bsi = bsi_start (new_bb);
279
280 /* Create the destination of the factored goto. Each original
281 computed goto will put its desired destination into this
282 variable and jump to the label we create immediately
283 below. */
284 var = create_tmp_var (ptr_type_node, "gotovar");
285
286 /* Build a label for the new block which will contain the
287 factored computed goto. */
288 factored_label_decl = create_artificial_label ();
289 factored_computed_goto_label
290 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
291 bsi_insert_after (&new_bsi, factored_computed_goto_label,
292 BSI_NEW_STMT);
293
294 /* Build our new computed goto. */
295 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
296 bsi_insert_after (&new_bsi, factored_computed_goto,
297 BSI_NEW_STMT);
298 }
299
300 /* Copy the original computed goto's destination into VAR. */
301 assignment = build (MODIFY_EXPR, ptr_type_node,
302 var, GOTO_DESTINATION (last));
303 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304
305 /* And re-vector the computed goto to the new destination. */
306 GOTO_DESTINATION (last) = factored_label_decl;
307 }
308 }
309 }
310
311
312 /* Build a flowgraph for the statement_list STMT_LIST. */
313
314 static void
315 make_blocks (tree stmt_list)
316 {
317 tree_stmt_iterator i = tsi_start (stmt_list);
318 tree stmt = NULL;
319 bool start_new_block = true;
320 bool first_stmt_of_list = true;
321 basic_block bb = ENTRY_BLOCK_PTR;
322
323 while (!tsi_end_p (i))
324 {
325 tree prev_stmt;
326
327 prev_stmt = stmt;
328 stmt = tsi_stmt (i);
329
330 /* If the statement starts a new basic block or if we have determined
331 in a previous pass that we need to create a new block for STMT, do
332 so now. */
333 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
334 {
335 if (!first_stmt_of_list)
336 stmt_list = tsi_split_statement_list_before (&i);
337 bb = create_basic_block (stmt_list, NULL, bb);
338 start_new_block = false;
339 }
340
341 /* Now add STMT to BB and create the subgraphs for special statement
342 codes. */
343 set_bb_for_stmt (stmt, bb);
344
345 if (computed_goto_p (stmt))
346 found_computed_goto = true;
347
348 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349 next iteration. */
350 if (stmt_ends_bb_p (stmt))
351 start_new_block = true;
352
353 tsi_next (&i);
354 first_stmt_of_list = false;
355 }
356 }
357
358
359 /* Create and return a new empty basic block after bb AFTER. */
360
361 static basic_block
362 create_bb (void *h, void *e, basic_block after)
363 {
364 basic_block bb;
365
366 gcc_assert (!e);
367
368 /* Create and initialize a new basic block. Since alloc_block uses
369 ggc_alloc_cleared to allocate a basic block, we do not have to
370 clear the newly allocated basic block here. */
371 bb = alloc_block ();
372
373 bb->index = last_basic_block;
374 bb->flags = BB_NEW;
375 bb->stmt_list = h ? h : alloc_stmt_list ();
376
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
379
380 /* Grow the basic block array if needed. */
381 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
382 {
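      /* Grow by roughly 25% (and by at least one element) so that repeated
         insertions stay amortized.  */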
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384 VARRAY_GROW (basic_block_info, new_size);
385 }
386
387 /* Add the newly created block to the array. */
388 BASIC_BLOCK (last_basic_block) = bb;
389
390 n_basic_blocks++;
391 last_basic_block++;
392
393 return bb;
394 }
395
396
397 /*---------------------------------------------------------------------------
398 Edge creation
399 ---------------------------------------------------------------------------*/
400
401 /* Fold COND_EXPR_COND of each COND_EXPR. */
402
403 void
404 fold_cond_expr_cond (void)
405 {
406 basic_block bb;
407
408 FOR_EACH_BB (bb)
409 {
410 tree stmt = last_stmt (bb);
411
412 if (stmt
413 && TREE_CODE (stmt) == COND_EXPR)
414 {
415 tree cond = fold (COND_EXPR_COND (stmt));
416 if (integer_zerop (cond))
417 COND_EXPR_COND (stmt) = boolean_false_node;
418 else if (integer_onep (cond))
419 COND_EXPR_COND (stmt) = boolean_true_node;
420 }
421 }
422 }
423
424 /* Join all the blocks in the flowgraph. */
425
426 static void
427 make_edges (void)
428 {
429 basic_block bb;
430
431 /* Create an edge from entry to the first block with executable
432 statements in it. */
433 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
434
435 /* Traverse the basic block array placing edges. */
436 FOR_EACH_BB (bb)
437 {
438 tree first = first_stmt (bb);
439 tree last = last_stmt (bb);
440
441 if (first)
442 {
443 /* Edges for statements that always alter flow control. */
444 if (is_ctrl_stmt (last))
445 make_ctrl_stmt_edges (bb);
446
447 /* Edges for statements that sometimes alter flow control. */
448 if (is_ctrl_altering_stmt (last))
449 make_exit_edges (bb);
450 }
451
452 /* Finally, if no edges were created above, this is a regular
453 basic block that only needs a fallthru edge. */
454 if (EDGE_COUNT (bb->succs) == 0)
455 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
456 }
457
458 /* We do not care about fake edges, so remove any that the CFG
459 builder inserted for completeness. */
460 remove_fake_exit_edges ();
461
462 /* Fold COND_EXPR_COND of each COND_EXPR. */
463 fold_cond_expr_cond ();
464
465 /* Clean up the graph and warn for unreachable code. */
466 cleanup_tree_cfg ();
467 }
468
469
470 /* Create edges for control statement at basic block BB. */
471
472 static void
473 make_ctrl_stmt_edges (basic_block bb)
474 {
475 tree last = last_stmt (bb);
476
477 gcc_assert (last);
478 switch (TREE_CODE (last))
479 {
480 case GOTO_EXPR:
481 make_goto_expr_edges (bb);
482 break;
483
484 case RETURN_EXPR:
485 make_edge (bb, EXIT_BLOCK_PTR, 0);
486 break;
487
488 case COND_EXPR:
489 make_cond_expr_edges (bb);
490 break;
491
492 case SWITCH_EXPR:
493 make_switch_expr_edges (bb);
494 break;
495
496 case RESX_EXPR:
497 make_eh_edges (last);
498 /* Yet another NORETURN hack. */
499 if (EDGE_COUNT (bb->succs) == 0)
500 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
501 break;
502
503 default:
504 gcc_unreachable ();
505 }
506 }
507
508
509 /* Create exit edges for statements in block BB that alter the flow of
510 control. Statements that alter the control flow are 'goto', 'return'
511 and calls to non-returning functions. */
512
513 static void
514 make_exit_edges (basic_block bb)
515 {
516 tree last = last_stmt (bb), op;
517
518 gcc_assert (last);
519 switch (TREE_CODE (last))
520 {
521 case RESX_EXPR:
522 break;
523 case CALL_EXPR:
524 /* If this function receives a nonlocal goto, then we need to
525 make edges from this call site to all the nonlocal goto
526 handlers. */
527 if (TREE_SIDE_EFFECTS (last)
528 && current_function_has_nonlocal_label)
529 make_goto_expr_edges (bb);
530
531 /* If this statement has reachable exception handlers, then
532 create abnormal edges to them. */
533 make_eh_edges (last);
534
535 /* Some calls are known not to return. For such calls we create
536 a fake edge.
537
538 We really need to revamp how we build edges so that it's not
539 such a bloody pain to avoid creating edges for this case since
540 all we do is remove these edges when we're done building the
541 CFG. */
542 if (call_expr_flags (last) & ECF_NORETURN)
543 {
544 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
545 return;
546 }
547
548 /* Don't forget the fall-thru edge. */
549 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
550 break;
551
552 case MODIFY_EXPR:
553 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
554 may have an abnormal edge. Search the RHS for this case and
555 create any required edges. */
556 op = get_call_expr_in (last);
557 if (op && TREE_SIDE_EFFECTS (op)
558 && current_function_has_nonlocal_label)
559 make_goto_expr_edges (bb);
560
561 make_eh_edges (last);
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
564
565 default:
566 gcc_unreachable ();
567 }
568 }
569
570
571 /* Create the edges for a COND_EXPR starting at block BB.
572 At this point, both clauses must contain only simple gotos. */
573
574 static void
575 make_cond_expr_edges (basic_block bb)
576 {
577 tree entry = last_stmt (bb);
578 basic_block then_bb, else_bb;
579 tree then_label, else_label;
580 edge e;
581
582 gcc_assert (entry);
583 gcc_assert (TREE_CODE (entry) == COND_EXPR);
584
585 /* Entry basic blocks for each component. */
586 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
587 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
588 then_bb = label_to_block (then_label);
589 else_bb = label_to_block (else_label);
590
591 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
592 #ifdef USE_MAPPED_LOCATION
593 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
594 #else
595 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
596 #endif
597 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
598 if (e)
599 {
600 #ifdef USE_MAPPED_LOCATION
601 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
602 #else
603 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
604 #endif
605 }
606 }
607
608 /* Hashing routine for EDGE_TO_CASES. */
609
610 static hashval_t
611 edge_to_cases_hash (const void *p)
612 {
613 edge e = ((struct edge_to_cases_elt *)p)->e;
614
615 /* Hash on the edge itself (which is a pointer). */
616 return htab_hash_pointer (e);
617 }
618
619 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
620 for equality is just a pointer comparison. */
621
622 static int
623 edge_to_cases_eq (const void *p1, const void *p2)
624 {
625 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
626 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627
628 return e1 == e2;
629 }
630
631 /* Called for each element in the hash table (P) as we delete the
632 edge to cases hash table.
633
634 Clear all the TREE_CHAINs to prevent problems with copying of
635 SWITCH_EXPRs and structure sharing rules, then free the hash table
636 element. */
637
638 static void
639 edge_to_cases_cleanup (void *p)
640 {
641 struct edge_to_cases_elt *elt = p;
642 tree t, next;
643
644 for (t = elt->case_labels; t; t = next)
645 {
646 next = TREE_CHAIN (t);
647 TREE_CHAIN (t) = NULL;
648 }
649 free (p);
650 }
651
652 /* Start recording information mapping edges to case labels. */
653
654 void
655 start_recording_case_labels (void)
656 {
657 gcc_assert (edge_to_cases == NULL);
658
659 edge_to_cases = htab_create (37,
660 edge_to_cases_hash,
661 edge_to_cases_eq,
662 edge_to_cases_cleanup);
663 }
664
665 /* Return nonzero if we are recording information for case labels. */
666
667 static bool
668 recording_case_labels_p (void)
669 {
670 return (edge_to_cases != NULL);
671 }
672
673 /* Stop recording information mapping edges to case labels and
674 remove any information we have recorded. */
675 void
676 end_recording_case_labels (void)
677 {
678 htab_delete (edge_to_cases);
679 edge_to_cases = NULL;
680 }
681
682 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
683
684 static void
685 record_switch_edge (edge e, tree case_label)
686 {
687 struct edge_to_cases_elt *elt;
688 void **slot;
689
690 /* Build a hash table element so we can see if E is already
691 in the table. */
692 elt = xmalloc (sizeof (struct edge_to_cases_elt));
693 elt->e = e;
694 elt->case_labels = case_label;
695
696 slot = htab_find_slot (edge_to_cases, elt, INSERT);
697
698 if (*slot == NULL)
699 {
700 /* E was not in the hash table. Install E into the hash table. */
701 *slot = (void *)elt;
702 }
703 else
704 {
705 /* E was already in the hash table. Free ELT as we do not need it
706 anymore. */
707 free (elt);
708
709 /* Get the entry stored in the hash table. */
710 elt = (struct edge_to_cases_elt *) *slot;
711
712 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
713 TREE_CHAIN (case_label) = elt->case_labels;
714 elt->case_labels = case_label;
715 }
716 }
717
718 /* If we are inside a {start,end}_recording_cases block, then return
719 a chain of CASE_LABEL_EXPRs from T which reference E.
720
721 Otherwise return NULL. */
722
723 static tree
724 get_cases_for_edge (edge e, tree t)
725 {
726 struct edge_to_cases_elt elt, *elt_p;
727 void **slot;
728 size_t i, n;
729 tree vec;
730
731 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
732 chains available. Return NULL so the caller can detect this case. */
733 if (!recording_case_labels_p ())
734 return NULL;
735
736 restart:
737 elt.e = e;
738 elt.case_labels = NULL;
739 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740
741 if (slot)
742 {
743 elt_p = (struct edge_to_cases_elt *)*slot;
744 return elt_p->case_labels;
745 }
746
747 /* If we did not find E in the hash table, then this must be the first
748 time we have been queried for information about E & T. Add all the
749 elements from T to the hash table then perform the query again. */
750
751 vec = SWITCH_LABELS (t);
752 n = TREE_VEC_LENGTH (vec);
753 for (i = 0; i < n; i++)
754 {
755 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
756 basic_block label_bb = label_to_block (lab);
757 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758 }
759 goto restart;
760 }
761
762 /* Create the edges for a SWITCH_EXPR starting at block BB.
763 At this point, the switch body has been lowered and the
764 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
765
766 static void
767 make_switch_expr_edges (basic_block bb)
768 {
769 tree entry = last_stmt (bb);
770 size_t i, n;
771 tree vec;
772
773 vec = SWITCH_LABELS (entry);
774 n = TREE_VEC_LENGTH (vec);
775
776 for (i = 0; i < n; ++i)
777 {
778 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
779 basic_block label_bb = label_to_block (lab);
780 make_edge (bb, label_bb, 0);
781 }
782 }
783
784
785 /* Return the basic block holding label DEST. */
786
787 basic_block
788 label_to_block_fn (struct function *ifun, tree dest)
789 {
790 int uid = LABEL_DECL_UID (dest);
791
792   /* We would die hard when faced with an undefined label.  Emit a label to
793      the very first basic block.  This will hopefully make even the dataflow
794      and undefined-variable warnings reasonably accurate.  */
795 if ((errorcount || sorrycount) && uid < 0)
796 {
797 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
798 tree stmt;
799
800 stmt = build1 (LABEL_EXPR, void_type_node, dest);
801 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
802 uid = LABEL_DECL_UID (dest);
803 }
804 if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
805 return NULL;
806 return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
807 }
808
809 /* Create edges for a goto statement at block BB. */
810
811 static void
812 make_goto_expr_edges (basic_block bb)
813 {
814 tree goto_t;
815 basic_block target_bb;
816 int for_call;
817 block_stmt_iterator last = bsi_last (bb);
818
819 goto_t = bsi_stmt (last);
820
821 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
822 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
823 from a nonlocal goto. */
824 if (TREE_CODE (goto_t) != GOTO_EXPR)
825 for_call = 1;
826 else
827 {
828 tree dest = GOTO_DESTINATION (goto_t);
829 for_call = 0;
830
831 /* A GOTO to a local label creates normal edges. */
832 if (simple_goto_p (goto_t))
833 {
834 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
835 #ifdef USE_MAPPED_LOCATION
836 e->goto_locus = EXPR_LOCATION (goto_t);
837 #else
838 e->goto_locus = EXPR_LOCUS (goto_t);
839 #endif
840 bsi_remove (&last);
841 return;
842 }
843
844 /* Nothing more to do for nonlocal gotos. */
845 if (TREE_CODE (dest) == LABEL_DECL)
846 return;
847
848 /* Computed gotos remain. */
849 }
850
851 /* Look for the block starting with the destination label. In the
852 case of a computed goto, make an edge to any label block we find
853 in the CFG. */
854 FOR_EACH_BB (target_bb)
855 {
856 block_stmt_iterator bsi;
857
858 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
859 {
860 tree target = bsi_stmt (bsi);
861
862 if (TREE_CODE (target) != LABEL_EXPR)
863 break;
864
865 if (
866 /* Computed GOTOs. Make an edge to every label block that has
867 been marked as a potential target for a computed goto. */
868 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
869 /* Nonlocal GOTO target. Make an edge to every label block
870 that has been marked as a potential target for a nonlocal
871 goto. */
872 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
873 {
874 make_edge (bb, target_bb, EDGE_ABNORMAL);
875 break;
876 }
877 }
878 }
879
880 /* Degenerate case of computed goto with no labels. */
881 if (!for_call && EDGE_COUNT (bb->succs) == 0)
882 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
883 }
884
885
886 /*---------------------------------------------------------------------------
887 Flowgraph analysis
888 ---------------------------------------------------------------------------*/
889
890 /* Clean up useless labels in basic blocks.  This is something we wish
891 to do early because it allows us to group case labels before creating
892 the edges for the CFG, and it speeds up block statement iterators in
893 all passes later on.
894    We only run this pass once; running it more than once is probably not
895    profitable.  */
896
897 /* A map from basic block index to the leading label of that block. */
898 static tree *label_for_bb;
899
900 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
901 static void
902 update_eh_label (struct eh_region *region)
903 {
904 tree old_label = get_eh_region_tree_label (region);
905 if (old_label)
906 {
907 tree new_label;
908 basic_block bb = label_to_block (old_label);
909
910 /* ??? After optimizing, there may be EH regions with labels
911 that have already been removed from the function body, so
912 there is no basic block for them. */
913 if (! bb)
914 return;
915
916 new_label = label_for_bb[bb->index];
917 set_eh_region_tree_label (region, new_label);
918 }
919 }
920
921 /* Given LABEL return the first label in the same basic block. */
922 static tree
923 main_block_label (tree label)
924 {
925 basic_block bb = label_to_block (label);
926
927   /* label_to_block may have inserted an undefined label into the chain.  */
928 if (!label_for_bb[bb->index])
929 label_for_bb[bb->index] = label;
930 return label_for_bb[bb->index];
931 }
932
933 /* Clean up redundant labels.  This is a three-step process:
934 1) Find the leading label for each block.
935 2) Redirect all references to labels to the leading labels.
936 3) Cleanup all useless labels. */
937
938 void
939 cleanup_dead_labels (void)
940 {
941 basic_block bb;
942 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
943
944 /* Find a suitable label for each block. We use the first user-defined
945 label if there is one, or otherwise just the first label we see. */
946 FOR_EACH_BB (bb)
947 {
948 block_stmt_iterator i;
949
950 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
951 {
952 tree label, stmt = bsi_stmt (i);
953
954 if (TREE_CODE (stmt) != LABEL_EXPR)
955 break;
956
957 label = LABEL_EXPR_LABEL (stmt);
958
959 /* If we have not yet seen a label for the current block,
960 remember this one and see if there are more labels. */
961 if (! label_for_bb[bb->index])
962 {
963 label_for_bb[bb->index] = label;
964 continue;
965 }
966
967 /* If we did see a label for the current block already, but it
968 is an artificially created label, replace it if the current
969 label is a user defined label. */
970 if (! DECL_ARTIFICIAL (label)
971 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
972 {
973 label_for_bb[bb->index] = label;
974 break;
975 }
976 }
977 }
978
979 /* Now redirect all jumps/branches to the selected label.
980 First do so for each block ending in a control statement. */
981 FOR_EACH_BB (bb)
982 {
983 tree stmt = last_stmt (bb);
984 if (!stmt)
985 continue;
986
987 switch (TREE_CODE (stmt))
988 {
989 case COND_EXPR:
990 {
991 tree true_branch, false_branch;
992
993 true_branch = COND_EXPR_THEN (stmt);
994 false_branch = COND_EXPR_ELSE (stmt);
995
996 GOTO_DESTINATION (true_branch)
997 = main_block_label (GOTO_DESTINATION (true_branch));
998 GOTO_DESTINATION (false_branch)
999 = main_block_label (GOTO_DESTINATION (false_branch));
1000
1001 break;
1002 }
1003
1004 case SWITCH_EXPR:
1005 {
1006 size_t i;
1007 tree vec = SWITCH_LABELS (stmt);
1008 size_t n = TREE_VEC_LENGTH (vec);
1009
1010 /* Replace all destination labels. */
1011 for (i = 0; i < n; ++i)
1012 {
1013 tree elt = TREE_VEC_ELT (vec, i);
1014 tree label = main_block_label (CASE_LABEL (elt));
1015 CASE_LABEL (elt) = label;
1016 }
1017 break;
1018 }
1019
1020 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1021 remove them until after we've created the CFG edges. */
1022 case GOTO_EXPR:
1023 if (! computed_goto_p (stmt))
1024 {
1025 GOTO_DESTINATION (stmt)
1026 = main_block_label (GOTO_DESTINATION (stmt));
1027 break;
1028 }
1029
1030 default:
1031 break;
1032 }
1033 }
1034
1035 for_each_eh_region (update_eh_label);
1036
1037 /* Finally, purge dead labels. All user-defined labels and labels that
1038 can be the target of non-local gotos are preserved. */
1039 FOR_EACH_BB (bb)
1040 {
1041 block_stmt_iterator i;
1042 tree label_for_this_bb = label_for_bb[bb->index];
1043
1044 if (! label_for_this_bb)
1045 continue;
1046
1047 for (i = bsi_start (bb); !bsi_end_p (i); )
1048 {
1049 tree label, stmt = bsi_stmt (i);
1050
1051 if (TREE_CODE (stmt) != LABEL_EXPR)
1052 break;
1053
1054 label = LABEL_EXPR_LABEL (stmt);
1055
1056 if (label == label_for_this_bb
1057 || ! DECL_ARTIFICIAL (label)
1058 || DECL_NONLOCAL (label))
1059 bsi_next (&i);
1060 else
1061 bsi_remove (&i);
1062 }
1063 }
1064
1065 free (label_for_bb);
1066 }
1067
1068 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1069 and scan the sorted vector of cases. Combine the ones jumping to the
1070 same label.
1071    E.g., three separate entries 1: 2: 3: become one entry 1..3:  */
1072
1073 void
1074 group_case_labels (void)
1075 {
1076 basic_block bb;
1077
1078 FOR_EACH_BB (bb)
1079 {
1080 tree stmt = last_stmt (bb);
1081 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1082 {
1083 tree labels = SWITCH_LABELS (stmt);
1084 int old_size = TREE_VEC_LENGTH (labels);
1085 int i, j, new_size = old_size;
1086 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1087 tree default_label;
1088
1089 /* The default label is always the last case in a switch
1090 statement after gimplification. */
1091 default_label = CASE_LABEL (default_case);
1092
1093 /* Look for possible opportunities to merge cases.
1094 Ignore the last element of the label vector because it
1095 must be the default case. */
1096 i = 0;
1097 while (i < old_size - 1)
1098 {
1099 tree base_case, base_label, base_high;
1100 base_case = TREE_VEC_ELT (labels, i);
1101
1102 gcc_assert (base_case);
1103 base_label = CASE_LABEL (base_case);
1104
1105 /* Discard cases that have the same destination as the
1106 default case. */
1107 if (base_label == default_label)
1108 {
1109 TREE_VEC_ELT (labels, i) = NULL_TREE;
1110 i++;
1111 new_size--;
1112 continue;
1113 }
1114
1115 base_high = CASE_HIGH (base_case) ?
1116 CASE_HIGH (base_case) : CASE_LOW (base_case);
1117 i++;
1118 /* Try to merge case labels. Break out when we reach the end
1119 of the label vector or when we cannot merge the next case
1120 label with the current one. */
1121 while (i < old_size - 1)
1122 {
1123 tree merge_case = TREE_VEC_ELT (labels, i);
1124 tree merge_label = CASE_LABEL (merge_case);
1125 tree t = int_const_binop (PLUS_EXPR, base_high,
1126 integer_one_node, 1);
1127
1128 /* Merge the cases if they jump to the same place,
1129 and their ranges are consecutive. */
1130 if (merge_label == base_label
1131 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1132 {
1133 base_high = CASE_HIGH (merge_case) ?
1134 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1135 CASE_HIGH (base_case) = base_high;
1136 TREE_VEC_ELT (labels, i) = NULL_TREE;
1137 new_size--;
1138 i++;
1139 }
1140 else
1141 break;
1142 }
1143 }
1144
1145 /* Compress the case labels in the label vector, and adjust the
1146 length of the vector. */
1147 for (i = 0, j = 0; i < new_size; i++)
1148 {
1149 while (! TREE_VEC_ELT (labels, j))
1150 j++;
1151 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1152 }
1153 TREE_VEC_LENGTH (labels) = new_size;
1154 }
1155 }
1156 }
1157
1158 /* Checks whether we can merge block B into block A. */
1159
1160 static bool
1161 tree_can_merge_blocks_p (basic_block a, basic_block b)
1162 {
1163 tree stmt;
1164 block_stmt_iterator bsi;
1165 tree phi;
1166
1167 if (!single_succ_p (a))
1168 return false;
1169
1170 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1171 return false;
1172
1173 if (single_succ (a) != b)
1174 return false;
1175
1176 if (!single_pred_p (b))
1177 return false;
1178
1179 if (b == EXIT_BLOCK_PTR)
1180 return false;
1181
1182   /* If A ends with a statement causing exceptions or something similar, we
1183 cannot merge the blocks. */
1184 stmt = last_stmt (a);
1185 if (stmt && stmt_ends_bb_p (stmt))
1186 return false;
1187
1188 /* Do not allow a block with only a non-local label to be merged. */
1189 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1190 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1191 return false;
1192
1193 /* It must be possible to eliminate all phi nodes in B. If ssa form
1194 is not up-to-date, we cannot eliminate any phis. */
1195 phi = phi_nodes (b);
1196 if (phi)
1197 {
1198 if (need_ssa_update_p ())
1199 return false;
1200
1201 for (; phi; phi = PHI_CHAIN (phi))
1202 if (!is_gimple_reg (PHI_RESULT (phi))
1203 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1204 return false;
1205 }
1206
1207 /* Do not remove user labels. */
1208 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1209 {
1210 stmt = bsi_stmt (bsi);
1211 if (TREE_CODE (stmt) != LABEL_EXPR)
1212 break;
1213 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1214 return false;
1215 }
1216
1217 /* Protect the loop latches. */
1218 if (current_loops
1219 && b->loop_father->latch == b)
1220 return false;
1221
1222 return true;
1223 }
1224
1225 /* Replaces all uses of NAME by VAL. */
1226
1227 void
1228 replace_uses_by (tree name, tree val)
1229 {
1230 imm_use_iterator imm_iter;
1231 use_operand_p use;
1232 tree stmt;
1233 edge e;
1234 unsigned i;
1235 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1236
1237 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1238 {
1239 stmt = USE_STMT (use);
1240
1241 SET_USE (use, val);
1242
1243 if (TREE_CODE (stmt) == PHI_NODE)
1244 {
1245 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1246 if (e->flags & EDGE_ABNORMAL)
1247 {
1248 /* This can only occur for virtual operands, since
1249 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1250 would prevent replacement. */
1251 gcc_assert (!is_gimple_reg (name));
1252 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1253 }
1254 }
1255 else
1256 VEC_safe_push (tree, heap, stmts, stmt);
1257 }
1258
1259 /* We do not update the statements in the loop above. Consider
1260 x = w * w;
1261
1262 If we performed the update in the first loop, the statement
1263      would be rescanned after the first occurrence of w is replaced,
1264      the new uses would be placed at the beginning of the list,
1265 and we would never process them. */
1266 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1267 {
1268 tree rhs;
1269
1270 fold_stmt_inplace (stmt);
1271
1272 rhs = get_rhs (stmt);
1273 if (TREE_CODE (rhs) == ADDR_EXPR)
1274 recompute_tree_invarant_for_addr_expr (rhs);
1275
1276 /* If the statement could throw and now cannot, we need to prune cfg. */
1277 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
1278 tree_purge_dead_eh_edges (bb_for_stmt (stmt));
1279
1280 mark_new_vars_to_rename (stmt);
1281 }
1282
1283 VEC_free (tree, heap, stmts);
1284
1285 /* Also update the trees stored in loop structures. */
1286 if (current_loops)
1287 {
1288 struct loop *loop;
1289
1290 for (i = 0; i < current_loops->num; i++)
1291 {
1292 loop = current_loops->parray[i];
1293 if (loop)
1294 substitute_in_loop_info (loop, name, val);
1295 }
1296 }
1297 }
1298
1299 /* Merge block B into block A. */
1300
1301 static void
1302 tree_merge_blocks (basic_block a, basic_block b)
1303 {
1304 block_stmt_iterator bsi;
1305 tree_stmt_iterator last;
1306 tree phi;
1307
1308 if (dump_file)
1309 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1310
1311 /* Remove all single-valued PHI nodes from block B of the form
1312 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1313 bsi = bsi_last (a);
1314 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1315 {
1316 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1317 tree copy;
1318
1319 if (!may_propagate_copy (def, use))
1320 {
1321 gcc_assert (is_gimple_reg (def));
1322
1323 /* Note that just emitting the copies is fine -- there is no problem
1324 with ordering of phi nodes. This is because A is the single
1325 predecessor of B, therefore results of the phi nodes cannot
1326 appear as arguments of the phi nodes. */
1327 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1328 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1329 SET_PHI_RESULT (phi, NULL_TREE);
1330 SSA_NAME_DEF_STMT (def) = copy;
1331 }
1332 else
1333 replace_uses_by (def, use);
1334
1335 remove_phi_node (phi, NULL);
1336 }
1337
1338 /* Ensure that B follows A. */
1339 move_block_after (b, a);
1340
1341 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1342 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1343
1344 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1345 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1346 {
1347 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1348 {
1349 tree label = bsi_stmt (bsi);
1350
1351 bsi_remove (&bsi);
1352 /* Now that we can thread computed gotos, we might have
1353         a situation where we have a forced label in block B.
1354         However, the label at the start of block B might still be
1355         used in other ways (think about the runtime checking for
1356         Fortran assigned gotos).  So we cannot just delete the
1357 label. Instead we move the label to the start of block A. */
1358 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1359 {
1360 block_stmt_iterator dest_bsi = bsi_start (a);
1361 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1362 }
1363 }
1364 else
1365 {
1366 set_bb_for_stmt (bsi_stmt (bsi), a);
1367 bsi_next (&bsi);
1368 }
1369 }
1370
1371 /* Merge the chains. */
1372 last = tsi_last (a->stmt_list);
1373 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1374 b->stmt_list = NULL;
1375 }
1376
1377
1378 /* Walk the function tree removing unnecessary statements.
1379
1380 * Empty statement nodes are removed
1381
1382 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1383
1384 * Unnecessary COND_EXPRs are removed
1385
1386 * Some unnecessary BIND_EXPRs are removed
1387
1388 Clearly more work could be done. The trick is doing the analysis
1389 and removal fast enough to be a net improvement in compile times.
1390
1391    Note that when we remove a control structure such as a COND_EXPR,
1392 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1393 to ensure we eliminate all the useless code. */
1394
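/* State shared by the remove_useless_stmts_* routines.  LAST_GOTO points
   to the most recently seen simple GOTO_EXPR, so that a LABEL_EXPR
   immediately following it can delete it.  REPEAT notes that a
   simplification was made which may expose further opportunities, so the
   whole walk should be repeated.  MAY_THROW, MAY_BRANCH and HAS_LABEL
   describe the statements seen so far in the current construct.  */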
1395 struct rus_data
1396 {
1397 tree *last_goto;
1398 bool repeat;
1399 bool may_throw;
1400 bool may_branch;
1401 bool has_label;
1402 };
1403
1404 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1405
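/* If STMT, or any statement contained in it, has a usable source location,
   warn that it will never be executed.  Return true if a warning was
   issued.  */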
1406 static bool
1407 remove_useless_stmts_warn_notreached (tree stmt)
1408 {
1409 if (EXPR_HAS_LOCATION (stmt))
1410 {
1411 location_t loc = EXPR_LOCATION (stmt);
1412 if (LOCATION_LINE (loc) > 0)
1413 {
1414 warning (0, "%Hwill never be executed", &loc);
1415 return true;
1416 }
1417 }
1418
1419 switch (TREE_CODE (stmt))
1420 {
1421 case STATEMENT_LIST:
1422 {
1423 tree_stmt_iterator i;
1424 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1425 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1426 return true;
1427 }
1428 break;
1429
1430 case COND_EXPR:
1431 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1432 return true;
1433 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1434 return true;
1435 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1436 return true;
1437 break;
1438
1439 case TRY_FINALLY_EXPR:
1440 case TRY_CATCH_EXPR:
1441 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1442 return true;
1443 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1444 return true;
1445 break;
1446
1447 case CATCH_EXPR:
1448 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1449 case EH_FILTER_EXPR:
1450 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1451 case BIND_EXPR:
1452 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1453
1454 default:
1455 /* Not a live container. */
1456 break;
1457 }
1458
1459 return false;
1460 }
1461
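/* Helper for remove_useless_stmts_1.  Simplify the COND_EXPR at *STMT_P:
   fold its condition, drop the statement or one of its arms when that is
   obviously safe, and catch a few other easy cases such as both arms
   branching to the same label.  */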
1462 static void
1463 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1464 {
1465 tree then_clause, else_clause, cond;
1466 bool save_has_label, then_has_label, else_has_label;
1467
1468 save_has_label = data->has_label;
1469 data->has_label = false;
1470 data->last_goto = NULL;
1471
1472 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1473
1474 then_has_label = data->has_label;
1475 data->has_label = false;
1476 data->last_goto = NULL;
1477
1478 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1479
1480 else_has_label = data->has_label;
1481 data->has_label = save_has_label | then_has_label | else_has_label;
1482
1483 then_clause = COND_EXPR_THEN (*stmt_p);
1484 else_clause = COND_EXPR_ELSE (*stmt_p);
1485 cond = fold (COND_EXPR_COND (*stmt_p));
1486
1487 /* If neither arm does anything at all, we can remove the whole IF. */
1488 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1489 {
1490 *stmt_p = build_empty_stmt ();
1491 data->repeat = true;
1492 }
1493
1494 /* If there are no reachable statements in an arm, then we can
1495 zap the entire conditional. */
1496 else if (integer_nonzerop (cond) && !else_has_label)
1497 {
1498 if (warn_notreached)
1499 remove_useless_stmts_warn_notreached (else_clause);
1500 *stmt_p = then_clause;
1501 data->repeat = true;
1502 }
1503 else if (integer_zerop (cond) && !then_has_label)
1504 {
1505 if (warn_notreached)
1506 remove_useless_stmts_warn_notreached (then_clause);
1507 *stmt_p = else_clause;
1508 data->repeat = true;
1509 }
1510
1511 /* Check a couple of simple things on then/else with single stmts. */
1512 else
1513 {
1514 tree then_stmt = expr_only (then_clause);
1515 tree else_stmt = expr_only (else_clause);
1516
1517 /* Notice branches to a common destination. */
1518 if (then_stmt && else_stmt
1519 && TREE_CODE (then_stmt) == GOTO_EXPR
1520 && TREE_CODE (else_stmt) == GOTO_EXPR
1521 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1522 {
1523 *stmt_p = then_stmt;
1524 data->repeat = true;
1525 }
1526
1527 /* If the THEN/ELSE clause merely assigns a value to a variable or
1528 parameter which is already known to contain that value, then
1529 remove the useless THEN/ELSE clause. */
1530 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1531 {
1532 if (else_stmt
1533 && TREE_CODE (else_stmt) == MODIFY_EXPR
1534 && TREE_OPERAND (else_stmt, 0) == cond
1535 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1536 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1537 }
1538 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1539 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1540 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1541 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1542 {
1543 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1544 ? then_stmt : else_stmt);
1545 tree *location = (TREE_CODE (cond) == EQ_EXPR
1546 ? &COND_EXPR_THEN (*stmt_p)
1547 : &COND_EXPR_ELSE (*stmt_p));
1548
1549 if (stmt
1550 && TREE_CODE (stmt) == MODIFY_EXPR
1551 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1552 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1553 *location = alloc_stmt_list ();
1554 }
1555 }
1556
1557 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1558 would be re-introduced during lowering. */
1559 data->last_goto = NULL;
1560 }
1561
1562
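/* Helper for remove_useless_stmts_1.  Simplify the TRY_FINALLY_EXPR at
   *STMT_P: drop the TRY_FINALLY_EXPR when either operand is empty, or
   string the two operands together when the body can neither throw nor
   branch out of it.  */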
1563 static void
1564 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1565 {
1566 bool save_may_branch, save_may_throw;
1567 bool this_may_branch, this_may_throw;
1568
1569 /* Collect may_branch and may_throw information for the body only. */
1570 save_may_branch = data->may_branch;
1571 save_may_throw = data->may_throw;
1572 data->may_branch = false;
1573 data->may_throw = false;
1574 data->last_goto = NULL;
1575
1576 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1577
1578 this_may_branch = data->may_branch;
1579 this_may_throw = data->may_throw;
1580 data->may_branch |= save_may_branch;
1581 data->may_throw |= save_may_throw;
1582 data->last_goto = NULL;
1583
1584 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1585
1586 /* If the body is empty, then we can emit the FINALLY block without
1587 the enclosing TRY_FINALLY_EXPR. */
1588 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1589 {
1590 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1591 data->repeat = true;
1592 }
1593
1594 /* If the handler is empty, then we can emit the TRY block without
1595 the enclosing TRY_FINALLY_EXPR. */
1596 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1597 {
1598 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1599 data->repeat = true;
1600 }
1601
1602 /* If the body neither throws, nor branches, then we can safely
1603 string the TRY and FINALLY blocks together. */
1604 else if (!this_may_branch && !this_may_throw)
1605 {
1606 tree stmt = *stmt_p;
1607 *stmt_p = TREE_OPERAND (stmt, 0);
1608 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1609 data->repeat = true;
1610 }
1611 }
1612
1613
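/* Helper for remove_useless_stmts_1.  Simplify the TRY_CATCH_EXPR at
   *STMT_P: drop the handler entirely if the body cannot throw, and
   examine the handler to decide whether exceptions can propagate past
   this point.  */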
1614 static void
1615 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1616 {
1617 bool save_may_throw, this_may_throw;
1618 tree_stmt_iterator i;
1619 tree stmt;
1620
1621 /* Collect may_throw information for the body only. */
1622 save_may_throw = data->may_throw;
1623 data->may_throw = false;
1624 data->last_goto = NULL;
1625
1626 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1627
1628 this_may_throw = data->may_throw;
1629 data->may_throw = save_may_throw;
1630
1631 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1632 if (!this_may_throw)
1633 {
1634 if (warn_notreached)
1635 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1636 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1637 data->repeat = true;
1638 return;
1639 }
1640
1641 /* Process the catch clause specially. We may be able to tell that
1642 no exceptions propagate past this point. */
1643
1644 this_may_throw = true;
1645 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1646 stmt = tsi_stmt (i);
1647 data->last_goto = NULL;
1648
1649 switch (TREE_CODE (stmt))
1650 {
1651 case CATCH_EXPR:
1652 for (; !tsi_end_p (i); tsi_next (&i))
1653 {
1654 stmt = tsi_stmt (i);
1655 /* If we catch all exceptions, then the body does not
1656 propagate exceptions past this point. */
1657 if (CATCH_TYPES (stmt) == NULL)
1658 this_may_throw = false;
1659 data->last_goto = NULL;
1660 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1661 }
1662 break;
1663
1664 case EH_FILTER_EXPR:
1665 if (EH_FILTER_MUST_NOT_THROW (stmt))
1666 this_may_throw = false;
1667 else if (EH_FILTER_TYPES (stmt) == NULL)
1668 this_may_throw = false;
1669 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1670 break;
1671
1672 default:
1673 /* Otherwise this is a cleanup. */
1674 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1675
1676 /* If the cleanup is empty, then we can emit the TRY block without
1677 the enclosing TRY_CATCH_EXPR. */
1678 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1679 {
1680 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1681 data->repeat = true;
1682 }
1683 break;
1684 }
1685 data->may_throw |= this_may_throw;
1686 }
1687
1688
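/* Helper for remove_useless_stmts_1.  Process the body of the BIND_EXPR
   at *STMT_P, then remove the BIND_EXPR itself when it binds no variables
   and is not the toplevel BIND_EXPR of the current function or of an
   inlined function.  */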
1689 static void
1690 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1691 {
1692 tree block;
1693
1694 /* First remove anything underneath the BIND_EXPR. */
1695 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1696
1697 /* If the BIND_EXPR has no variables, then we can pull everything
1698 up one level and remove the BIND_EXPR, unless this is the toplevel
1699 BIND_EXPR for the current function or an inlined function.
1700
1701 When this situation occurs we will want to apply this
1702 optimization again. */
1703 block = BIND_EXPR_BLOCK (*stmt_p);
1704 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1705 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1706 && (! block
1707 || ! BLOCK_ABSTRACT_ORIGIN (block)
1708 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1709 != FUNCTION_DECL)))
1710 {
1711 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1712 data->repeat = true;
1713 }
1714 }
1715
1716
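/* Helper for remove_useless_stmts_1.  Note that the GOTO_EXPR at *STMT_P
   branches, and remember it so that a label immediately following it for
   the same destination can delete it.  */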
1717 static void
1718 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1719 {
1720 tree dest = GOTO_DESTINATION (*stmt_p);
1721
1722 data->may_branch = true;
1723 data->last_goto = NULL;
1724
1725 /* Record the last goto expr, so that we can delete it if unnecessary. */
1726 if (TREE_CODE (dest) == LABEL_DECL)
1727 data->last_goto = stmt_p;
1728 }
1729
1730
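/* Helper for remove_useless_stmts_1.  Note that the LABEL_EXPR at *STMT_P
   defines a label, and delete the immediately preceding goto if it only
   jumped to this label.  */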
1731 static void
1732 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1733 {
1734 tree label = LABEL_EXPR_LABEL (*stmt_p);
1735
1736 data->has_label = true;
1737
1738 /* We do want to jump across non-local label receiver code. */
1739 if (DECL_NONLOCAL (label))
1740 data->last_goto = NULL;
1741
1742 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1743 {
1744 *data->last_goto = build_empty_stmt ();
1745 data->repeat = true;
1746 }
1747
1748 /* ??? Add something here to delete unused labels. */
1749 }
1750
1751
1752 /* If CALL calls a "const" or "pure" function, clear TREE_SIDE_EFFECTS on
1753    the call.  This allows us to eliminate redundant or useless
1754    calls to "const" functions.
1755 
1756    The gimplifier already does the same operation, but we may notice a
1757    function being const or pure only after its calls have been gimplified,
1758    so we need to update the flag here.  */
1759
1760 static void
1761 update_call_expr_flags (tree call)
1762 {
1763 tree decl = get_callee_fndecl (call);
1764 if (!decl)
1765 return;
1766 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1767 TREE_SIDE_EFFECTS (call) = 0;
1768 if (TREE_NOTHROW (decl))
1769 TREE_NOTHROW (call) = 1;
1770 }
1771
1772
1773 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1774
1775 void
1776 notice_special_calls (tree t)
1777 {
1778 int flags = call_expr_flags (t);
1779
1780 if (flags & ECF_MAY_BE_ALLOCA)
1781 current_function_calls_alloca = true;
1782 if (flags & ECF_RETURNS_TWICE)
1783 current_function_calls_setjmp = true;
1784 }
1785
1786
1787 /* Clear flags set by notice_special_calls. Used by dead code removal
1788 to update the flags. */
1789
1790 void
1791 clear_special_calls (void)
1792 {
1793 current_function_calls_alloca = false;
1794 current_function_calls_setjmp = false;
1795 }
1796
1797
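/* Worker for remove_useless_stmts.  Walk the tree rooted at *TP,
   dispatching on the statement code: recurse into compound statements,
   flatten nested STATEMENT_LISTs, and delete empty statements.  */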
1798 static void
1799 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1800 {
1801 tree t = *tp, op;
1802
1803 switch (TREE_CODE (t))
1804 {
1805 case COND_EXPR:
1806 remove_useless_stmts_cond (tp, data);
1807 break;
1808
1809 case TRY_FINALLY_EXPR:
1810 remove_useless_stmts_tf (tp, data);
1811 break;
1812
1813 case TRY_CATCH_EXPR:
1814 remove_useless_stmts_tc (tp, data);
1815 break;
1816
1817 case BIND_EXPR:
1818 remove_useless_stmts_bind (tp, data);
1819 break;
1820
1821 case GOTO_EXPR:
1822 remove_useless_stmts_goto (tp, data);
1823 break;
1824
1825 case LABEL_EXPR:
1826 remove_useless_stmts_label (tp, data);
1827 break;
1828
1829 case RETURN_EXPR:
1830 fold_stmt (tp);
1831 data->last_goto = NULL;
1832 data->may_branch = true;
1833 break;
1834
1835 case CALL_EXPR:
1836 fold_stmt (tp);
1837 data->last_goto = NULL;
1838 notice_special_calls (t);
1839 update_call_expr_flags (t);
1840 if (tree_could_throw_p (t))
1841 data->may_throw = true;
1842 break;
1843
1844 case MODIFY_EXPR:
1845 data->last_goto = NULL;
1846 fold_stmt (tp);
1847 op = get_call_expr_in (t);
1848 if (op)
1849 {
1850 update_call_expr_flags (op);
1851 notice_special_calls (op);
1852 }
1853 if (tree_could_throw_p (t))
1854 data->may_throw = true;
1855 break;
1856
1857 case STATEMENT_LIST:
1858 {
1859 tree_stmt_iterator i = tsi_start (t);
1860 while (!tsi_end_p (i))
1861 {
1862 t = tsi_stmt (i);
1863 if (IS_EMPTY_STMT (t))
1864 {
1865 tsi_delink (&i);
1866 continue;
1867 }
1868
1869 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1870
1871 t = tsi_stmt (i);
1872 if (TREE_CODE (t) == STATEMENT_LIST)
1873 {
1874 tsi_link_before (&i, t, TSI_SAME_STMT);
1875 tsi_delink (&i);
1876 }
1877 else
1878 tsi_next (&i);
1879 }
1880 }
1881 break;
1882 case ASM_EXPR:
1883 fold_stmt (tp);
1884 data->last_goto = NULL;
1885 break;
1886
1887 default:
1888 data->last_goto = NULL;
1889 break;
1890 }
1891 }
1892
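/* Main entry point of the pass: repeatedly walk the body of the current
   function until no further simplifications are found.  */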
1893 static void
1894 remove_useless_stmts (void)
1895 {
1896 struct rus_data data;
1897
1898 clear_special_calls ();
1899
1900 do
1901 {
1902 memset (&data, 0, sizeof (data));
1903 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1904 }
1905 while (data.repeat);
1906 }
1907
1908
1909 struct tree_opt_pass pass_remove_useless_stmts =
1910 {
1911 "useless", /* name */
1912 NULL, /* gate */
1913 remove_useless_stmts, /* execute */
1914 NULL, /* sub */
1915 NULL, /* next */
1916 0, /* static_pass_number */
1917 0, /* tv_id */
1918 PROP_gimple_any, /* properties_required */
1919 0, /* properties_provided */
1920 0, /* properties_destroyed */
1921 0, /* todo_flags_start */
1922 TODO_dump_func, /* todo_flags_finish */
1923 0 /* letter */
1924 };
1925
1926 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1927
1928 static void
1929 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1930 {
1931 tree phi;
1932
1933 /* Since this block is no longer reachable, we can just delete all
1934 of its PHI nodes. */
1935 phi = phi_nodes (bb);
1936 while (phi)
1937 {
1938 tree next = PHI_CHAIN (phi);
1939 remove_phi_node (phi, NULL_TREE);
1940 phi = next;
1941 }
1942
1943 /* Remove edges to BB's successors. */
1944 while (EDGE_COUNT (bb->succs) > 0)
1945 remove_edge (EDGE_SUCC (bb, 0));
1946 }
1947
1948
1949 /* Remove statements of basic block BB. */
1950
1951 static void
1952 remove_bb (basic_block bb)
1953 {
1954 block_stmt_iterator i;
1955 #ifdef USE_MAPPED_LOCATION
1956 source_location loc = UNKNOWN_LOCATION;
1957 #else
1958 source_locus loc = 0;
1959 #endif
1960
1961 if (dump_file)
1962 {
1963 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1964 if (dump_flags & TDF_DETAILS)
1965 {
1966 dump_bb (bb, dump_file, 0);
1967 fprintf (dump_file, "\n");
1968 }
1969 }
1970
1971 /* If we remove the header or the latch of a loop, mark the loop for
1972 removal by setting its header and latch to NULL. */
1973 if (current_loops)
1974 {
1975 struct loop *loop = bb->loop_father;
1976
1977 if (loop->latch == bb
1978 || loop->header == bb)
1979 {
1980 loop->latch = NULL;
1981 loop->header = NULL;
1982 }
1983 }
1984
1985 /* Remove all the instructions in the block. */
1986 for (i = bsi_start (bb); !bsi_end_p (i);)
1987 {
1988 tree stmt = bsi_stmt (i);
1989 if (TREE_CODE (stmt) == LABEL_EXPR
1990 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
1991 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
1992 {
1993 basic_block new_bb;
1994 block_stmt_iterator new_bsi;
1995
1996 /* A non-reachable non-local label may still be referenced.
1997 But it no longer needs to carry the extra semantics of
1998 non-locality. */
1999 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2000 {
2001 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2002 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2003 }
2004
2005 new_bb = bb->prev_bb;
2006 new_bsi = bsi_start (new_bb);
2007 bsi_remove (&i);
2008 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2009 }
2010 else
2011 {
2012 /* Release SSA definitions if we are in SSA. Note that we
2013 may be called when not in SSA. For example,
2014 final_cleanup calls this function via
2015 cleanup_tree_cfg. */
2016 if (in_ssa_p)
2017 release_defs (stmt);
2018
2019 bsi_remove (&i);
2020 }
2021
2022 /* Don't warn for removed gotos. Gotos are often removed due to
2023 jump threading, thus resulting in bogus warnings. Not great,
2024 since this way we lose warnings for gotos in the original
2025 program that are indeed unreachable. */
2026 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2027 {
2028 #ifdef USE_MAPPED_LOCATION
2029 if (EXPR_HAS_LOCATION (stmt))
2030 loc = EXPR_LOCATION (stmt);
2031 #else
2032 source_locus t;
2033 t = EXPR_LOCUS (stmt);
2034 if (t && LOCATION_LINE (*t) > 0)
2035 loc = t;
2036 #endif
2037 }
2038 }
2039
2040 /* If requested, give a warning that the first statement in the
2041 block is unreachable.  The loop above walks the statements in order
2042 and only records the first location it finds, so LOC refers to the
2043 first statement in the block. */
2044 #ifdef USE_MAPPED_LOCATION
2045 if (loc > BUILTINS_LOCATION)
2046 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2047 #else
2048 if (loc)
2049 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2050 #endif
2051
2052 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2053 }
2054
2055
2056 /* Given a basic block BB ending with COND_EXPR, SWITCH_EXPR or a
2057 computed GOTO_EXPR, and a predicate VAL, return the edge that will be
2058 taken out of the block. If VAL does not match a unique edge, NULL is returned. */
2059
2060 edge
2061 find_taken_edge (basic_block bb, tree val)
2062 {
2063 tree stmt;
2064
2065 stmt = last_stmt (bb);
2066
2067 gcc_assert (stmt);
2068 gcc_assert (is_ctrl_stmt (stmt));
2069 gcc_assert (val);
2070
2071 if (! is_gimple_min_invariant (val))
2072 return NULL;
2073
2074 if (TREE_CODE (stmt) == COND_EXPR)
2075 return find_taken_edge_cond_expr (bb, val);
2076
2077 if (TREE_CODE (stmt) == SWITCH_EXPR)
2078 return find_taken_edge_switch_expr (bb, val);
2079
2080 if (computed_goto_p (stmt))
2081 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2082
2083 gcc_unreachable ();
2084 }
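
/* A hedged usage sketch (guarded out; not part of the compiler): a pass
   that has proven the controlling predicate of BB to be the constant VAL
   could ask for the unique edge that must be taken and discard the other
   successors.  The function name below is hypothetical.  */
#if 0
static void
example_keep_only_taken_edge (basic_block bb, tree val)
{
  edge taken = find_taken_edge (bb, val);

  /* VAL did not select a unique successor; nothing to do.  */
  if (!taken)
    return;

  /* Remove every other outgoing edge.  The now-unreachable blocks are
     left for the CFG cleanup code to dispose of.  */
  while (EDGE_COUNT (bb->succs) > 1)
    {
      edge e = EDGE_SUCC (bb, 0);
      if (e == taken)
        e = EDGE_SUCC (bb, 1);
      remove_edge (e);
    }
}
#endif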
2085
2086 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2087 statement, determine which of the outgoing edges will be taken out of the
2088 block. Return NULL if any edge may be taken. */
2089
2090 static edge
2091 find_taken_edge_computed_goto (basic_block bb, tree val)
2092 {
2093 basic_block dest;
2094 edge e = NULL;
2095
2096 dest = label_to_block (val);
2097 if (dest)
2098 {
2099 e = find_edge (bb, dest);
2100 gcc_assert (e != NULL);
2101 }
2102
2103 return e;
2104 }
2105
2106 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2107 statement, determine which of the two edges will be taken out of the
2108 block. Return NULL if either edge may be taken. */
2109
2110 static edge
2111 find_taken_edge_cond_expr (basic_block bb, tree val)
2112 {
2113 edge true_edge, false_edge;
2114
2115 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2116
2117 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2118 return (zero_p (val) ? false_edge : true_edge);
2119 }
2120
2121 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2122 statement, determine which edge will be taken out of the block. Return
2123 NULL if any edge may be taken. */
2124
2125 static edge
2126 find_taken_edge_switch_expr (basic_block bb, tree val)
2127 {
2128 tree switch_expr, taken_case;
2129 basic_block dest_bb;
2130 edge e;
2131
2132 switch_expr = last_stmt (bb);
2133 taken_case = find_case_label_for_value (switch_expr, val);
2134 dest_bb = label_to_block (CASE_LABEL (taken_case));
2135
2136 e = find_edge (bb, dest_bb);
2137 gcc_assert (e);
2138 return e;
2139 }
2140
2141
2142 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2143 We can make optimal use here of the fact that the case labels are
2144 sorted: We can do a binary search for a case matching VAL. */
2145
2146 static tree
2147 find_case_label_for_value (tree switch_expr, tree val)
2148 {
2149 tree vec = SWITCH_LABELS (switch_expr);
2150 size_t low, high, n = TREE_VEC_LENGTH (vec);
2151 tree default_case = TREE_VEC_ELT (vec, n - 1);
2152
2153 for (low = -1, high = n - 1; high - low > 1; )
2154 {
2155 size_t i = (high + low) / 2;
2156 tree t = TREE_VEC_ELT (vec, i);
2157 int cmp;
2158
2159 /* Cache the result of comparing CASE_LOW and val. */
2160 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2161
2162 if (cmp > 0)
2163 high = i;
2164 else
2165 low = i;
2166
2167 if (CASE_HIGH (t) == NULL)
2168 {
2169 /* A single-valued case label. */
2170 if (cmp == 0)
2171 return t;
2172 }
2173 else
2174 {
2175 /* A case range. We can only handle integer ranges. */
2176 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2177 return t;
2178 }
2179 }
2180
2181 return default_case;
2182 }
2183
2184
2185
2186
2187 /*---------------------------------------------------------------------------
2188 Debugging functions
2189 ---------------------------------------------------------------------------*/
2190
2191 /* Dump tree-specific information of block BB to file OUTF. */
2192
2193 void
2194 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2195 {
2196 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2197 }
2198
2199
2200 /* Dump a basic block on stderr. */
2201
2202 void
2203 debug_tree_bb (basic_block bb)
2204 {
2205 dump_bb (bb, stderr, 0);
2206 }
2207
2208
2209 /* Dump basic block with index N on stderr. */
2210
2211 basic_block
2212 debug_tree_bb_n (int n)
2213 {
2214 debug_tree_bb (BASIC_BLOCK (n));
2215 return BASIC_BLOCK (n);
2216 }
2217
2218
2219 /* Dump the CFG on stderr.
2220
2221 FLAGS are the same used by the tree dumping functions
2222 (see TDF_* in tree.h). */
2223
2224 void
2225 debug_tree_cfg (int flags)
2226 {
2227 dump_tree_cfg (stderr, flags);
2228 }
2229
2230
2231 /* Dump the program showing basic block boundaries on the given FILE.
2232
2233 FLAGS are the same used by the tree dumping functions (see TDF_* in
2234 tree.h). */
2235
2236 void
2237 dump_tree_cfg (FILE *file, int flags)
2238 {
2239 if (flags & TDF_DETAILS)
2240 {
2241 const char *funcname
2242 = lang_hooks.decl_printable_name (current_function_decl, 2);
2243
2244 fputc ('\n', file);
2245 fprintf (file, ";; Function %s\n\n", funcname);
2246 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2247 n_basic_blocks, n_edges, last_basic_block);
2248
2249 brief_dump_cfg (file);
2250 fprintf (file, "\n");
2251 }
2252
2253 if (flags & TDF_STATS)
2254 dump_cfg_stats (file);
2255
2256 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2257 }
2258
2259
2260 /* Dump CFG statistics on FILE. */
2261
2262 void
2263 dump_cfg_stats (FILE *file)
2264 {
2265 static long max_num_merged_labels = 0;
2266 unsigned long size, total = 0;
2267 long num_edges;
2268 basic_block bb;
2269 const char * const fmt_str = "%-30s%-13s%12s\n";
2270 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2271 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2272 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2273 const char *funcname
2274 = lang_hooks.decl_printable_name (current_function_decl, 2);
2275
2276
2277 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2278
2279 fprintf (file, "---------------------------------------------------------\n");
2280 fprintf (file, fmt_str, "", " Number of ", "Memory");
2281 fprintf (file, fmt_str, "", " instances ", "used ");
2282 fprintf (file, "---------------------------------------------------------\n");
2283
2284 size = n_basic_blocks * sizeof (struct basic_block_def);
2285 total += size;
2286 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2287 SCALE (size), LABEL (size));
2288
2289 num_edges = 0;
2290 FOR_EACH_BB (bb)
2291 num_edges += EDGE_COUNT (bb->succs);
2292 size = num_edges * sizeof (struct edge_def);
2293 total += size;
2294 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2295
2296 fprintf (file, "---------------------------------------------------------\n");
2297 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2298 LABEL (total));
2299 fprintf (file, "---------------------------------------------------------\n");
2300 fprintf (file, "\n");
2301
2302 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2303 max_num_merged_labels = cfg_stats.num_merged_labels;
2304
2305 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2306 cfg_stats.num_merged_labels, max_num_merged_labels);
2307
2308 fprintf (file, "\n");
2309 }
2310
2311
2312 /* Dump CFG statistics on stderr. Keep extern so that it's always
2313 linked in the final executable. */
2314
2315 void
2316 debug_cfg_stats (void)
2317 {
2318 dump_cfg_stats (stderr);
2319 }
2320
2321
2322 /* Dump the flowgraph to a .vcg FILE. */
2323
2324 static void
2325 tree_cfg2vcg (FILE *file)
2326 {
2327 edge e;
2328 edge_iterator ei;
2329 basic_block bb;
2330 const char *funcname
2331 = lang_hooks.decl_printable_name (current_function_decl, 2);
2332
2333 /* Write the file header. */
2334 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2335 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2336 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2337
2338 /* Write blocks and edges. */
2339 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2340 {
2341 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2342 e->dest->index);
2343
2344 if (e->flags & EDGE_FAKE)
2345 fprintf (file, " linestyle: dotted priority: 10");
2346 else
2347 fprintf (file, " linestyle: solid priority: 100");
2348
2349 fprintf (file, " }\n");
2350 }
2351 fputc ('\n', file);
2352
2353 FOR_EACH_BB (bb)
2354 {
2355 enum tree_code head_code, end_code;
2356 const char *head_name, *end_name;
2357 int head_line = 0;
2358 int end_line = 0;
2359 tree first = first_stmt (bb);
2360 tree last = last_stmt (bb);
2361
2362 if (first)
2363 {
2364 head_code = TREE_CODE (first);
2365 head_name = tree_code_name[head_code];
2366 head_line = get_lineno (first);
2367 }
2368 else
2369 head_name = "no-statement";
2370
2371 if (last)
2372 {
2373 end_code = TREE_CODE (last);
2374 end_name = tree_code_name[end_code];
2375 end_line = get_lineno (last);
2376 }
2377 else
2378 end_name = "no-statement";
2379
2380 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2381 bb->index, bb->index, head_name, head_line, end_name,
2382 end_line);
2383
2384 FOR_EACH_EDGE (e, ei, bb->succs)
2385 {
2386 if (e->dest == EXIT_BLOCK_PTR)
2387 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2388 else
2389 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2390
2391 if (e->flags & EDGE_FAKE)
2392 fprintf (file, " priority: 10 linestyle: dotted");
2393 else
2394 fprintf (file, " priority: 100 linestyle: solid");
2395
2396 fprintf (file, " }\n");
2397 }
2398
2399 if (bb->next_bb != EXIT_BLOCK_PTR)
2400 fputc ('\n', file);
2401 }
2402
2403 fputs ("}\n\n", file);
2404 }
2405
2406
2407
2408 /*---------------------------------------------------------------------------
2409 Miscellaneous helpers
2410 ---------------------------------------------------------------------------*/
2411
2412 /* Return true if T represents a stmt that always transfers control. */
2413
2414 bool
2415 is_ctrl_stmt (tree t)
2416 {
2417 return (TREE_CODE (t) == COND_EXPR
2418 || TREE_CODE (t) == SWITCH_EXPR
2419 || TREE_CODE (t) == GOTO_EXPR
2420 || TREE_CODE (t) == RETURN_EXPR
2421 || TREE_CODE (t) == RESX_EXPR);
2422 }
2423
2424
2425 /* Return true if T is a statement that may alter the flow of control
2426 (e.g., a call to a non-returning function). */
2427
2428 bool
2429 is_ctrl_altering_stmt (tree t)
2430 {
2431 tree call;
2432
2433 gcc_assert (t);
2434 call = get_call_expr_in (t);
2435 if (call)
2436 {
2437 /* A non-pure/const CALL_EXPR alters flow control if the current
2438 function has nonlocal labels. */
2439 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2440 return true;
2441
2442 /* A CALL_EXPR also alters control flow if it does not return. */
2443 if (call_expr_flags (call) & ECF_NORETURN)
2444 return true;
2445 }
2446
2447 /* If a statement can throw, it alters control flow. */
2448 return tree_can_throw_internal (t);
2449 }
2450
2451
2452 /* Return true if T is a computed goto. */
2453
2454 bool
2455 computed_goto_p (tree t)
2456 {
2457 return (TREE_CODE (t) == GOTO_EXPR
2458 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2459 }
2460
2461
2462 /* Checks whether EXPR is a simple local goto. */
2463
2464 bool
2465 simple_goto_p (tree expr)
2466 {
2467 return (TREE_CODE (expr) == GOTO_EXPR
2468 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2469 }
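
/* A small guarded-out sketch of how the two predicates above partition
   GOTO_EXPRs: a goto to a LABEL_DECL is "simple" and is normally
   represented implicitly by CFG edges, while a goto through a variable
   is "computed" and keeps its explicit statement.  The function name is
   hypothetical.  */
#if 0
static const char *
example_classify_goto (tree stmt)
{
  if (TREE_CODE (stmt) != GOTO_EXPR)
    return "not a goto";
  if (simple_goto_p (stmt))
    return "simple goto, represented implicitly by edges";
  gcc_assert (computed_goto_p (stmt));
  return "computed goto, kept as an explicit statement";
}
#endif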
2470
2471
2472 /* Return true if T should start a new basic block. PREV_T is the
2473 statement preceding T. It is used when T is a label or a case label.
2474 Labels should only start a new basic block if their previous statement
2475 wasn't a label. Otherwise, a sequence of labels would generate
2476 unnecessary basic blocks that only contain a single label. */
2477
2478 static inline bool
2479 stmt_starts_bb_p (tree t, tree prev_t)
2480 {
2481 if (t == NULL_TREE)
2482 return false;
2483
2484 /* LABEL_EXPRs start a new basic block only if the preceding
2485 statement wasn't a label of the same type. This prevents the
2486 creation of consecutive blocks that have nothing but a single
2487 label. */
2488 if (TREE_CODE (t) == LABEL_EXPR)
2489 {
2490 /* Nonlocal and computed GOTO targets always start a new block. */
2491 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2492 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2493 return true;
2494
2495 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2496 {
2497 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2498 return true;
2499
2500 cfg_stats.num_merged_labels++;
2501 return false;
2502 }
2503 else
2504 return true;
2505 }
2506
2507 return false;
2508 }
2509
2510
2511 /* Return true if T should end a basic block. */
2512
2513 bool
2514 stmt_ends_bb_p (tree t)
2515 {
2516 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2517 }
2518
2519
2520 /* Add gotos that used to be represented implicitly in the CFG. */
2521
2522 void
2523 disband_implicit_edges (void)
2524 {
2525 basic_block bb;
2526 block_stmt_iterator last;
2527 edge e;
2528 edge_iterator ei;
2529 tree stmt, label;
2530
2531 FOR_EACH_BB (bb)
2532 {
2533 last = bsi_last (bb);
2534 stmt = last_stmt (bb);
2535
2536 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2537 {
2538 /* Remove superfluous gotos from COND_EXPR branches. Moved
2539 from cfg_remove_useless_stmts here since it violates the
2540 invariants for tree--cfg correspondence and thus fits better
2541 here where we do it anyway. */
2542 e = find_edge (bb, bb->next_bb);
2543 if (e)
2544 {
2545 if (e->flags & EDGE_TRUE_VALUE)
2546 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2547 else if (e->flags & EDGE_FALSE_VALUE)
2548 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2549 else
2550 gcc_unreachable ();
2551 e->flags |= EDGE_FALLTHRU;
2552 }
2553
2554 continue;
2555 }
2556
2557 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2558 {
2559 /* Remove the RETURN_EXPR if we may fall through to the exit
2560 instead. */
2561 gcc_assert (single_succ_p (bb));
2562 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2563
2564 if (bb->next_bb == EXIT_BLOCK_PTR
2565 && !TREE_OPERAND (stmt, 0))
2566 {
2567 bsi_remove (&last);
2568 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2569 }
2570 continue;
2571 }
2572
2573 /* There can be no fallthru edge if the last statement is a control
2574 one. */
2575 if (stmt && is_ctrl_stmt (stmt))
2576 continue;
2577
2578 /* Find a fallthru edge and emit the goto if necessary. */
2579 FOR_EACH_EDGE (e, ei, bb->succs)
2580 if (e->flags & EDGE_FALLTHRU)
2581 break;
2582
2583 if (!e || e->dest == bb->next_bb)
2584 continue;
2585
2586 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2587 label = tree_block_label (e->dest);
2588
2589 stmt = build1 (GOTO_EXPR, void_type_node, label);
2590 #ifdef USE_MAPPED_LOCATION
2591 SET_EXPR_LOCATION (stmt, e->goto_locus);
2592 #else
2593 SET_EXPR_LOCUS (stmt, e->goto_locus);
2594 #endif
2595 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2596 e->flags &= ~EDGE_FALLTHRU;
2597 }
2598 }
2599
2600 /* Remove block annotations and other datastructures. */
2601
2602 void
2603 delete_tree_cfg_annotations (void)
2604 {
2605 label_to_block_map = NULL;
2606 }
2607
2608
2609 /* Return the first statement in basic block BB. */
2610
2611 tree
2612 first_stmt (basic_block bb)
2613 {
2614 block_stmt_iterator i = bsi_start (bb);
2615 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2616 }
2617
2618
2619 /* Return the last statement in basic block BB. */
2620
2621 tree
2622 last_stmt (basic_block bb)
2623 {
2624 block_stmt_iterator b = bsi_last (bb);
2625 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2626 }
2627
2628
2629 /* Return a pointer to the last statement in block BB. */
2630
2631 tree *
2632 last_stmt_ptr (basic_block bb)
2633 {
2634 block_stmt_iterator last = bsi_last (bb);
2635 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2636 }
2637
2638
2639 /* Return the last statement of an otherwise empty block. Return NULL
2640 if the block is totally empty, or if it contains more than one
2641 statement. */
2642
2643 tree
2644 last_and_only_stmt (basic_block bb)
2645 {
2646 block_stmt_iterator i = bsi_last (bb);
2647 tree last, prev;
2648
2649 if (bsi_end_p (i))
2650 return NULL_TREE;
2651
2652 last = bsi_stmt (i);
2653 bsi_prev (&i);
2654 if (bsi_end_p (i))
2655 return last;
2656
2657 /* Empty statements should no longer appear in the instruction stream.
2658 Everything that might have appeared before should be deleted by
2659 remove_useless_stmts, and the optimizers should just bsi_remove
2660 instead of smashing with build_empty_stmt.
2661
2662 Thus the only thing that should appear here in a block containing
2663 one executable statement is a label. */
2664 prev = bsi_stmt (i);
2665 if (TREE_CODE (prev) == LABEL_EXPR)
2666 return last;
2667 else
2668 return NULL_TREE;
2669 }
2670
2671
2672 /* Mark BB as the basic block holding statement T. */
2673
2674 void
2675 set_bb_for_stmt (tree t, basic_block bb)
2676 {
2677 if (TREE_CODE (t) == PHI_NODE)
2678 PHI_BB (t) = bb;
2679 else if (TREE_CODE (t) == STATEMENT_LIST)
2680 {
2681 tree_stmt_iterator i;
2682 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2683 set_bb_for_stmt (tsi_stmt (i), bb);
2684 }
2685 else
2686 {
2687 stmt_ann_t ann = get_stmt_ann (t);
2688 ann->bb = bb;
2689
2690 /* If the statement is a label, add the label to block-to-labels map
2691 so that we can speed up edge creation for GOTO_EXPRs. */
2692 if (TREE_CODE (t) == LABEL_EXPR)
2693 {
2694 int uid;
2695
2696 t = LABEL_EXPR_LABEL (t);
2697 uid = LABEL_DECL_UID (t);
2698 if (uid == -1)
2699 {
2700 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2701 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2702 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2703 }
2704 else
2705 /* We're moving an existing label. Make sure that we've
2706 removed it from the old block. */
2707 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2708 VARRAY_BB (label_to_block_map, uid) = bb;
2709 }
2710 }
2711 }
2712
2713 /* Find and return the iterator pointing to STMT in its basic block. */
2714
2715 extern block_stmt_iterator
2716 bsi_for_stmt (tree stmt)
2717 {
2718 block_stmt_iterator bsi;
2719
2720 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2721 if (bsi_stmt (bsi) == stmt)
2722 return bsi;
2723
2724 gcc_unreachable ();
2725 }
2726
2727 /* Mark statement T as modified, and update it. */
2728 static inline void
2729 update_modified_stmts (tree t)
2730 {
2731 if (TREE_CODE (t) == STATEMENT_LIST)
2732 {
2733 tree_stmt_iterator i;
2734 tree stmt;
2735 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2736 {
2737 stmt = tsi_stmt (i);
2738 update_stmt_if_modified (stmt);
2739 }
2740 }
2741 else
2742 update_stmt_if_modified (t);
2743 }
2744
2745 /* Insert statement (or statement list) T before the statement
2746 pointed-to by iterator I. M specifies how to update iterator I
2747 after insertion (see enum bsi_iterator_update). */
2748
2749 void
2750 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2751 {
2752 set_bb_for_stmt (t, i->bb);
2753 update_modified_stmts (t);
2754 tsi_link_before (&i->tsi, t, m);
2755 }
2756
2757
2758 /* Insert statement (or statement list) T after the statement
2759 pointed-to by iterator I. M specifies how to update iterator I
2760 after insertion (see enum bsi_iterator_update). */
2761
2762 void
2763 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2764 {
2765 set_bb_for_stmt (t, i->bb);
2766 update_modified_stmts (t);
2767 tsi_link_after (&i->tsi, t, m);
2768 }
2769
2770
2771 /* Remove the statement pointed to by iterator I. The iterator is updated
2772 to the next statement. */
2773
2774 void
2775 bsi_remove (block_stmt_iterator *i)
2776 {
2777 tree t = bsi_stmt (*i);
2778 set_bb_for_stmt (t, NULL);
2779 delink_stmt_imm_use (t);
2780 tsi_delink (&i->tsi);
2781 mark_stmt_modified (t);
2782 }
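
/* A guarded-out sketch: given only a statement, recover its iterator with
   bsi_for_stmt and delete it from its block.  This is what optimizers are
   expected to do instead of overwriting statements with empty statements.
   The function name is hypothetical.  */
#if 0
static void
example_delete_stmt (tree stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  bsi_remove (&bsi);
}
#endif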
2783
2784
2785 /* Move the statement at FROM so it comes right after the statement at TO. */
2786
2787 void
2788 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2789 {
2790 tree stmt = bsi_stmt (*from);
2791 bsi_remove (from);
2792 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2793 }
2794
2795
2796 /* Move the statement at FROM so it comes right before the statement at TO. */
2797
2798 void
2799 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2800 {
2801 tree stmt = bsi_stmt (*from);
2802 bsi_remove (from);
2803 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2804 }
2805
2806
2807 /* Move the statement at FROM to the end of basic block BB. */
2808
2809 void
2810 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2811 {
2812 block_stmt_iterator last = bsi_last (bb);
2813
2814 /* Have to check bsi_end_p because it could be an empty block. */
2815 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2816 bsi_move_before (from, &last);
2817 else
2818 bsi_move_after (from, &last);
2819 }
2820
2821
2822 /* Replace the contents of the statement pointed to by iterator BSI
2823 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2824 information of the original statement is preserved. */
2825
2826 void
2827 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2828 {
2829 int eh_region;
2830 tree orig_stmt = bsi_stmt (*bsi);
2831
2832 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2833 set_bb_for_stmt (stmt, bsi->bb);
2834
2835 /* Preserve EH region information from the original statement, if
2836 requested by the caller. */
2837 if (preserve_eh_info)
2838 {
2839 eh_region = lookup_stmt_eh_region (orig_stmt);
2840 if (eh_region >= 0)
2841 add_stmt_to_eh_region (stmt, eh_region);
2842 }
2843
2844 delink_stmt_imm_use (orig_stmt);
2845 *bsi_stmt_ptr (*bsi) = stmt;
2846 mark_stmt_modified (stmt);
2847 update_modified_stmts (stmt);
2848 }
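
/* A guarded-out sketch: replace the statement at BSI with the assignment
   LHS = RHS while preserving any EH region of the original statement.
   The function name is hypothetical and LHS/RHS are assumed to be valid
   GIMPLE operands of the same type.  */
#if 0
static void
example_replace_with_assignment (block_stmt_iterator *bsi, tree lhs, tree rhs)
{
  tree assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  bsi_replace (bsi, assign, true);
}
#endif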
2849
2850
2851 /* Find a suitable location on edge E at which a statement can be
2852 inserted. Every attempt is made to place the statement in an existing
2853 basic block, but sometimes that isn't possible. When it isn't possible,
2854 the edge is split and the statement is added to the new block.
2855
2856 In all cases, the returned *BSI points to the correct location. The
2857 return value is true if insertion should be done after the location,
2858 or false if it should be done before the location. If a new basic block
2859 has to be created, it is stored in *NEW_BB. */
2860
2861 static bool
2862 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2863 basic_block *new_bb)
2864 {
2865 basic_block dest, src;
2866 tree tmp;
2867
2868 dest = e->dest;
2869 restart:
2870
2871 /* If the destination has one predecessor which has no PHI nodes,
2872 insert there. Except for the exit block.
2873
2874 The requirement for no PHI nodes could be relaxed. Basically we
2875 would have to examine the PHIs to prove that none of them used
2876 the value set by the statement we want to insert on E. That
2877 hardly seems worth the effort. */
2878 if (single_pred_p (dest)
2879 && ! phi_nodes (dest)
2880 && dest != EXIT_BLOCK_PTR)
2881 {
2882 *bsi = bsi_start (dest);
2883 if (bsi_end_p (*bsi))
2884 return true;
2885
2886 /* Make sure we insert after any leading labels. */
2887 tmp = bsi_stmt (*bsi);
2888 while (TREE_CODE (tmp) == LABEL_EXPR)
2889 {
2890 bsi_next (bsi);
2891 if (bsi_end_p (*bsi))
2892 break;
2893 tmp = bsi_stmt (*bsi);
2894 }
2895
2896 if (bsi_end_p (*bsi))
2897 {
2898 *bsi = bsi_last (dest);
2899 return true;
2900 }
2901 else
2902 return false;
2903 }
2904
2905 /* If the source has one successor, the edge is not abnormal and
2906 the last statement does not end a basic block, insert there.
2907 Except for the entry block. */
2908 src = e->src;
2909 if ((e->flags & EDGE_ABNORMAL) == 0
2910 && single_succ_p (src)
2911 && src != ENTRY_BLOCK_PTR)
2912 {
2913 *bsi = bsi_last (src);
2914 if (bsi_end_p (*bsi))
2915 return true;
2916
2917 tmp = bsi_stmt (*bsi);
2918 if (!stmt_ends_bb_p (tmp))
2919 return true;
2920
2921 /* Insert code just before returning the value. We may need to decompose
2922 the return in case it contains a non-trivial operand. */
2923 if (TREE_CODE (tmp) == RETURN_EXPR)
2924 {
2925 tree op = TREE_OPERAND (tmp, 0);
2926 if (!is_gimple_val (op))
2927 {
2928 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
2929 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2930 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2931 }
2932 bsi_prev (bsi);
2933 return true;
2934 }
2935 }
2936
2937 /* Otherwise, create a new basic block, and split this edge. */
2938 dest = split_edge (e);
2939 if (new_bb)
2940 *new_bb = dest;
2941 e = single_pred_edge (dest);
2942 goto restart;
2943 }
2944
2945
2946 /* This routine will commit all pending edge insertions, creating any new
2947 basic blocks which are necessary. */
2948
2949 void
2950 bsi_commit_edge_inserts (void)
2951 {
2952 basic_block bb;
2953 edge e;
2954 edge_iterator ei;
2955
2956 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2957
2958 FOR_EACH_BB (bb)
2959 FOR_EACH_EDGE (e, ei, bb->succs)
2960 bsi_commit_one_edge_insert (e, NULL);
2961 }
2962
2963
2964 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2965 to this block, otherwise set it to NULL. */
2966
2967 void
2968 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2969 {
2970 if (new_bb)
2971 *new_bb = NULL;
2972 if (PENDING_STMT (e))
2973 {
2974 block_stmt_iterator bsi;
2975 tree stmt = PENDING_STMT (e);
2976
2977 PENDING_STMT (e) = NULL_TREE;
2978
2979 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
2980 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2981 else
2982 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2983 }
2984 }
2985
2986
2987 /* Add STMT to the pending list of edge E. No actual insertion is
2988 made until a call to bsi_commit_edge_inserts () is made. */
2989
2990 void
2991 bsi_insert_on_edge (edge e, tree stmt)
2992 {
2993 append_to_statement_list (stmt, &PENDING_STMT (e));
2994 }
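
/* A guarded-out sketch: queue a copy of STMT on every edge into the exit
   block and then commit the pending insertions, which may split edges and
   create new basic blocks.  The function name is hypothetical; STMT is
   assumed to be a valid GIMPLE statement.  */
#if 0
static void
example_insert_before_exit (tree stmt)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    bsi_insert_on_edge (e, unshare_expr (stmt));

  /* Nothing is inserted until the pending insertions are committed.  */
  bsi_commit_edge_inserts ();
}
#endif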
2995
2996 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
2997 block has to be created, it is returned. */
2998
2999 basic_block
3000 bsi_insert_on_edge_immediate (edge e, tree stmt)
3001 {
3002 block_stmt_iterator bsi;
3003 basic_block new_bb = NULL;
3004
3005 gcc_assert (!PENDING_STMT (e));
3006
3007 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3008 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3009 else
3010 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3011
3012 return new_bb;
3013 }
3014
3015 /*---------------------------------------------------------------------------
3016 Tree specific functions for CFG manipulation
3017 ---------------------------------------------------------------------------*/
3018
3019 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3020
3021 static void
3022 reinstall_phi_args (edge new_edge, edge old_edge)
3023 {
3024 tree var, phi;
3025
3026 if (!PENDING_STMT (old_edge))
3027 return;
3028
3029 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3030 var && phi;
3031 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3032 {
3033 tree result = TREE_PURPOSE (var);
3034 tree arg = TREE_VALUE (var);
3035
3036 gcc_assert (result == PHI_RESULT (phi));
3037
3038 add_phi_arg (phi, arg, new_edge);
3039 }
3040
3041 PENDING_STMT (old_edge) = NULL;
3042 }
3043
3044 /* Returns the basic block after which the new basic block created
3045 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3046 near its "logical" location. This is of most help to humans looking
3047 at debugging dumps. */
3048
3049 static basic_block
3050 split_edge_bb_loc (edge edge_in)
3051 {
3052 basic_block dest = edge_in->dest;
3053
3054 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3055 return edge_in->src;
3056 else
3057 return dest->prev_bb;
3058 }
3059
3060 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3061 Abort on abnormal edges. */
3062
3063 static basic_block
3064 tree_split_edge (edge edge_in)
3065 {
3066 basic_block new_bb, after_bb, dest, src;
3067 edge new_edge, e;
3068
3069 /* Abnormal edges cannot be split. */
3070 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3071
3072 src = edge_in->src;
3073 dest = edge_in->dest;
3074
3075 after_bb = split_edge_bb_loc (edge_in);
3076
3077 new_bb = create_empty_bb (after_bb);
3078 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3079 new_bb->count = edge_in->count;
3080 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3081 new_edge->probability = REG_BR_PROB_BASE;
3082 new_edge->count = edge_in->count;
3083
3084 e = redirect_edge_and_branch (edge_in, new_bb);
3085 gcc_assert (e);
3086 reinstall_phi_args (new_edge, e);
3087
3088 return new_bb;
3089 }
3090
3091
3092 /* Return true when BB has label LABEL in it. */
3093
3094 static bool
3095 has_label_p (basic_block bb, tree label)
3096 {
3097 block_stmt_iterator bsi;
3098
3099 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3100 {
3101 tree stmt = bsi_stmt (bsi);
3102
3103 if (TREE_CODE (stmt) != LABEL_EXPR)
3104 return false;
3105 if (LABEL_EXPR_LABEL (stmt) == label)
3106 return true;
3107 }
3108 return false;
3109 }
3110
3111
3112 /* Callback for walk_tree, check that all elements with address taken are
3113 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3114 inside a PHI node. */
3115
3116 static tree
3117 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3118 {
3119 tree t = *tp, x;
3120 bool in_phi = (data != NULL);
3121
3122 if (TYPE_P (t))
3123 *walk_subtrees = 0;
3124
3125 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3126 #define CHECK_OP(N, MSG) \
3127 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3128 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3129
3130 switch (TREE_CODE (t))
3131 {
3132 case SSA_NAME:
3133 if (SSA_NAME_IN_FREE_LIST (t))
3134 {
3135 error ("SSA name in freelist but still referenced");
3136 return *tp;
3137 }
3138 break;
3139
3140 case ASSERT_EXPR:
3141 x = fold (ASSERT_EXPR_COND (t));
3142 if (x == boolean_false_node)
3143 {
3144 error ("ASSERT_EXPR with an always-false condition");
3145 return *tp;
3146 }
3147 break;
3148
3149 case MODIFY_EXPR:
3150 x = TREE_OPERAND (t, 0);
3151 if (TREE_CODE (x) == BIT_FIELD_REF
3152 && is_gimple_reg (TREE_OPERAND (x, 0)))
3153 {
3154 error ("GIMPLE register modified with BIT_FIELD_REF");
3155 return t;
3156 }
3157 break;
3158
3159 case ADDR_EXPR:
3160 {
3161 bool old_invariant;
3162 bool old_constant;
3163 bool old_side_effects;
3164 bool new_invariant;
3165 bool new_constant;
3166 bool new_side_effects;
3167
3168 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3169 dead PHIs that take the address of something. But if the PHI
3170 result is dead, the fact that it takes the address of anything
3171 is irrelevant. Because we can not tell from here if a PHI result
3172 is dead, we just skip this check for PHIs altogether. This means
3173 we may be missing "valid" checks, but what can you do?
3174 This was PR19217. */
3175 if (in_phi)
3176 break;
3177
3178 old_invariant = TREE_INVARIANT (t);
3179 old_constant = TREE_CONSTANT (t);
3180 old_side_effects = TREE_SIDE_EFFECTS (t);
3181
3182 recompute_tree_invarant_for_addr_expr (t);
3183 new_invariant = TREE_INVARIANT (t);
3184 new_side_effects = TREE_SIDE_EFFECTS (t);
3185 new_constant = TREE_CONSTANT (t);
3186
3187 if (old_invariant != new_invariant)
3188 {
3189 error ("invariant not recomputed when ADDR_EXPR changed");
3190 return t;
3191 }
3192
3193 if (old_constant != new_constant)
3194 {
3195 error ("constant not recomputed when ADDR_EXPR changed");
3196 return t;
3197 }
3198 if (old_side_effects != new_side_effects)
3199 {
3200 error ("side effects not recomputed when ADDR_EXPR changed");
3201 return t;
3202 }
3203
3204 /* Skip any references (they will be checked when we recurse down the
3205 tree) and ensure that any variable used as a prefix is marked
3206 addressable. */
3207 for (x = TREE_OPERAND (t, 0);
3208 handled_component_p (x);
3209 x = TREE_OPERAND (x, 0))
3210 ;
3211
3212 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3213 return NULL;
3214 if (!TREE_ADDRESSABLE (x))
3215 {
3216 error ("address taken, but ADDRESSABLE bit not set");
3217 return x;
3218 }
3219 break;
3220 }
3221
3222 case COND_EXPR:
3223 x = COND_EXPR_COND (t);
3224 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3225 {
3226 error ("non-boolean used in condition");
3227 return x;
3228 }
3229 if (!is_gimple_condexpr (x))
3230 {
3231 error ("invalid conditional operand");
3232 return x;
3233 }
3234 break;
3235
3236 case NOP_EXPR:
3237 case CONVERT_EXPR:
3238 case FIX_TRUNC_EXPR:
3239 case FIX_CEIL_EXPR:
3240 case FIX_FLOOR_EXPR:
3241 case FIX_ROUND_EXPR:
3242 case FLOAT_EXPR:
3243 case NEGATE_EXPR:
3244 case ABS_EXPR:
3245 case BIT_NOT_EXPR:
3246 case NON_LVALUE_EXPR:
3247 case TRUTH_NOT_EXPR:
3248 CHECK_OP (0, "invalid operand to unary operator");
3249 break;
3250
3251 case REALPART_EXPR:
3252 case IMAGPART_EXPR:
3253 case COMPONENT_REF:
3254 case ARRAY_REF:
3255 case ARRAY_RANGE_REF:
3256 case BIT_FIELD_REF:
3257 case VIEW_CONVERT_EXPR:
3258 /* We have a nest of references. Verify that each of the operands
3259 that determine where to reference is either a constant or a variable,
3260 verify that the base is valid, and then show we've already checked
3261 the subtrees. */
3262 while (handled_component_p (t))
3263 {
3264 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3265 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3266 else if (TREE_CODE (t) == ARRAY_REF
3267 || TREE_CODE (t) == ARRAY_RANGE_REF)
3268 {
3269 CHECK_OP (1, "invalid array index");
3270 if (TREE_OPERAND (t, 2))
3271 CHECK_OP (2, "invalid array lower bound");
3272 if (TREE_OPERAND (t, 3))
3273 CHECK_OP (3, "invalid array stride");
3274 }
3275 else if (TREE_CODE (t) == BIT_FIELD_REF)
3276 {
3277 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3278 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3279 }
3280
3281 t = TREE_OPERAND (t, 0);
3282 }
3283
3284 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3285 {
3286 error ("invalid reference prefix");
3287 return t;
3288 }
3289 *walk_subtrees = 0;
3290 break;
3291
3292 case LT_EXPR:
3293 case LE_EXPR:
3294 case GT_EXPR:
3295 case GE_EXPR:
3296 case EQ_EXPR:
3297 case NE_EXPR:
3298 case UNORDERED_EXPR:
3299 case ORDERED_EXPR:
3300 case UNLT_EXPR:
3301 case UNLE_EXPR:
3302 case UNGT_EXPR:
3303 case UNGE_EXPR:
3304 case UNEQ_EXPR:
3305 case LTGT_EXPR:
3306 case PLUS_EXPR:
3307 case MINUS_EXPR:
3308 case MULT_EXPR:
3309 case TRUNC_DIV_EXPR:
3310 case CEIL_DIV_EXPR:
3311 case FLOOR_DIV_EXPR:
3312 case ROUND_DIV_EXPR:
3313 case TRUNC_MOD_EXPR:
3314 case CEIL_MOD_EXPR:
3315 case FLOOR_MOD_EXPR:
3316 case ROUND_MOD_EXPR:
3317 case RDIV_EXPR:
3318 case EXACT_DIV_EXPR:
3319 case MIN_EXPR:
3320 case MAX_EXPR:
3321 case LSHIFT_EXPR:
3322 case RSHIFT_EXPR:
3323 case LROTATE_EXPR:
3324 case RROTATE_EXPR:
3325 case BIT_IOR_EXPR:
3326 case BIT_XOR_EXPR:
3327 case BIT_AND_EXPR:
3328 CHECK_OP (0, "invalid operand to binary operator");
3329 CHECK_OP (1, "invalid operand to binary operator");
3330 break;
3331
3332 default:
3333 break;
3334 }
3335 return NULL;
3336
3337 #undef CHECK_OP
3338 }
3339
3340
3341 /* Verify STMT, return true if STMT is not in GIMPLE form.
3342 TODO: Implement type checking. */
3343
3344 static bool
3345 verify_stmt (tree stmt, bool last_in_block)
3346 {
3347 tree addr;
3348
3349 if (!is_gimple_stmt (stmt))
3350 {
3351 error ("is not a valid GIMPLE statement");
3352 goto fail;
3353 }
3354
3355 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3356 if (addr)
3357 {
3358 debug_generic_stmt (addr);
3359 return true;
3360 }
3361
3362 /* If the statement is marked as part of an EH region, then it is
3363 expected that the statement could throw. Verify that when we
3364 have optimizations that simplify statements such that we prove
3365 that they cannot throw, that we update other data structures
3366 to match. */
3367 if (lookup_stmt_eh_region (stmt) >= 0)
3368 {
3369 if (!tree_could_throw_p (stmt))
3370 {
3371 error ("statement marked for throw, but doesn%'t");
3372 goto fail;
3373 }
3374 if (!last_in_block && tree_can_throw_internal (stmt))
3375 {
3376 error ("statement marked for throw in middle of block");
3377 goto fail;
3378 }
3379 }
3380
3381 return false;
3382
3383 fail:
3384 debug_generic_stmt (stmt);
3385 return true;
3386 }
3387
3388
3389 /* Return true when T can be shared. */
3390
3391 static bool
3392 tree_node_can_be_shared (tree t)
3393 {
3394 if (IS_TYPE_OR_DECL_P (t)
3395 /* We check for constants explicitly since they are not considered
3396 gimple invariants if they overflowed. */
3397 || CONSTANT_CLASS_P (t)
3398 || is_gimple_min_invariant (t)
3399 || TREE_CODE (t) == SSA_NAME
3400 || t == error_mark_node)
3401 return true;
3402
3403 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3404 return true;
3405
3406 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3407 /* We check for constants explicitly since they are not considered
3408 gimple invariants if they overflowed. */
3409 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3410 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3411 || (TREE_CODE (t) == COMPONENT_REF
3412 || TREE_CODE (t) == REALPART_EXPR
3413 || TREE_CODE (t) == IMAGPART_EXPR))
3414 t = TREE_OPERAND (t, 0);
3415
3416 if (DECL_P (t))
3417 return true;
3418
3419 return false;
3420 }
3421
3422
3423 /* Called via walk_tree. Verify tree sharing. */
3424
3425 static tree
3426 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3427 {
3428 htab_t htab = (htab_t) data;
3429 void **slot;
3430
3431 if (tree_node_can_be_shared (*tp))
3432 {
3433 *walk_subtrees = false;
3434 return NULL;
3435 }
3436
3437 slot = htab_find_slot (htab, *tp, INSERT);
3438 if (*slot)
3439 return *slot;
3440 *slot = *tp;
3441
3442 return NULL;
3443 }
3444
3445
3446 /* Verify the GIMPLE statement chain. */
3447
3448 void
3449 verify_stmts (void)
3450 {
3451 basic_block bb;
3452 block_stmt_iterator bsi;
3453 bool err = false;
3454 htab_t htab;
3455 tree addr;
3456
3457 timevar_push (TV_TREE_STMT_VERIFY);
3458 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3459
3460 FOR_EACH_BB (bb)
3461 {
3462 tree phi;
3463 int i;
3464
3465 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3466 {
3467 int phi_num_args = PHI_NUM_ARGS (phi);
3468
3469 if (bb_for_stmt (phi) != bb)
3470 {
3471 error ("bb_for_stmt (phi) is set to a wrong basic block");
3472 err |= true;
3473 }
3474
3475 for (i = 0; i < phi_num_args; i++)
3476 {
3477 tree t = PHI_ARG_DEF (phi, i);
3478 tree addr;
3479
3480 /* Addressable variables do have SSA_NAMEs but they
3481 are not considered gimple values. */
3482 if (TREE_CODE (t) != SSA_NAME
3483 && TREE_CODE (t) != FUNCTION_DECL
3484 && !is_gimple_val (t))
3485 {
3486 error ("PHI def is not a GIMPLE value");
3487 debug_generic_stmt (phi);
3488 debug_generic_stmt (t);
3489 err |= true;
3490 }
3491
3492 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3493 if (addr)
3494 {
3495 debug_generic_stmt (addr);
3496 err |= true;
3497 }
3498
3499 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3500 if (addr)
3501 {
3502 error ("incorrect sharing of tree nodes");
3503 debug_generic_stmt (phi);
3504 debug_generic_stmt (addr);
3505 err |= true;
3506 }
3507 }
3508 }
3509
3510 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3511 {
3512 tree stmt = bsi_stmt (bsi);
3513
3514 if (bb_for_stmt (stmt) != bb)
3515 {
3516 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3517 err |= true;
3518 }
3519
3520 bsi_next (&bsi);
3521 err |= verify_stmt (stmt, bsi_end_p (bsi));
3522 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3523 if (addr)
3524 {
3525 error ("incorrect sharing of tree nodes");
3526 debug_generic_stmt (stmt);
3527 debug_generic_stmt (addr);
3528 err |= true;
3529 }
3530 }
3531 }
3532
3533 if (err)
3534 internal_error ("verify_stmts failed");
3535
3536 htab_delete (htab);
3537 timevar_pop (TV_TREE_STMT_VERIFY);
3538 }
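
/* A guarded-out sketch: a pass that has just rewritten statements or the
   CFG might re-run the verifier when checking is enabled.  The function
   name is hypothetical.  */
#if 0
static void
example_verify_after_transform (void)
{
#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}
#endif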
3539
3540
3541 /* Verifies that the flow information is OK. */
3542
3543 static int
3544 tree_verify_flow_info (void)
3545 {
3546 int err = 0;
3547 basic_block bb;
3548 block_stmt_iterator bsi;
3549 tree stmt;
3550 edge e;
3551 edge_iterator ei;
3552
3553 if (ENTRY_BLOCK_PTR->stmt_list)
3554 {
3555 error ("ENTRY_BLOCK has a statement list associated with it");
3556 err = 1;
3557 }
3558
3559 if (EXIT_BLOCK_PTR->stmt_list)
3560 {
3561 error ("EXIT_BLOCK has a statement list associated with it");
3562 err = 1;
3563 }
3564
3565 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3566 if (e->flags & EDGE_FALLTHRU)
3567 {
3568 error ("fallthru to exit from bb %d", e->src->index);
3569 err = 1;
3570 }
3571
3572 FOR_EACH_BB (bb)
3573 {
3574 bool found_ctrl_stmt = false;
3575
3576 stmt = NULL_TREE;
3577
3578 /* Skip labels on the start of basic block. */
3579 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3580 {
3581 tree prev_stmt = stmt;
3582
3583 stmt = bsi_stmt (bsi);
3584
3585 if (TREE_CODE (stmt) != LABEL_EXPR)
3586 break;
3587
3588 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3589 {
3590 error ("nonlocal label %s is not first "
3591 "in a sequence of labels in bb %d",
3592 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3593 bb->index);
3594 err = 1;
3595 }
3596
3597 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3598 {
3599 error ("label %s to block does not match in bb %d",
3600 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3601 bb->index);
3602 err = 1;
3603 }
3604
3605 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3606 != current_function_decl)
3607 {
3608 error ("label %s has incorrect context in bb %d",
3609 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3610 bb->index);
3611 err = 1;
3612 }
3613 }
3614
3615 /* Verify that body of basic block BB is free of control flow. */
3616 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3617 {
3618 tree stmt = bsi_stmt (bsi);
3619
3620 if (found_ctrl_stmt)
3621 {
3622 error ("control flow in the middle of basic block %d",
3623 bb->index);
3624 err = 1;
3625 }
3626
3627 if (stmt_ends_bb_p (stmt))
3628 found_ctrl_stmt = true;
3629
3630 if (TREE_CODE (stmt) == LABEL_EXPR)
3631 {
3632 error ("label %s in the middle of basic block %d",
3633 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3634 bb->index);
3635 err = 1;
3636 }
3637 }
3638 bsi = bsi_last (bb);
3639 if (bsi_end_p (bsi))
3640 continue;
3641
3642 stmt = bsi_stmt (bsi);
3643
3644 err |= verify_eh_edges (stmt);
3645
3646 if (is_ctrl_stmt (stmt))
3647 {
3648 FOR_EACH_EDGE (e, ei, bb->succs)
3649 if (e->flags & EDGE_FALLTHRU)
3650 {
3651 error ("fallthru edge after a control statement in bb %d",
3652 bb->index);
3653 err = 1;
3654 }
3655 }
3656
3657 switch (TREE_CODE (stmt))
3658 {
3659 case COND_EXPR:
3660 {
3661 edge true_edge;
3662 edge false_edge;
3663 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3664 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3665 {
3666 error ("structured COND_EXPR at the end of bb %d", bb->index);
3667 err = 1;
3668 }
3669
3670 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3671
3672 if (!true_edge || !false_edge
3673 || !(true_edge->flags & EDGE_TRUE_VALUE)
3674 || !(false_edge->flags & EDGE_FALSE_VALUE)
3675 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3676 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3677 || EDGE_COUNT (bb->succs) >= 3)
3678 {
3679 error ("wrong outgoing edge flags at end of bb %d",
3680 bb->index);
3681 err = 1;
3682 }
3683
3684 if (!has_label_p (true_edge->dest,
3685 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3686 {
3687 error ("%<then%> label does not match edge at end of bb %d",
3688 bb->index);
3689 err = 1;
3690 }
3691
3692 if (!has_label_p (false_edge->dest,
3693 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3694 {
3695 error ("%<else%> label does not match edge at end of bb %d",
3696 bb->index);
3697 err = 1;
3698 }
3699 }
3700 break;
3701
3702 case GOTO_EXPR:
3703 if (simple_goto_p (stmt))
3704 {
3705 error ("explicit goto at end of bb %d", bb->index);
3706 err = 1;
3707 }
3708 else
3709 {
3710 /* FIXME. We should double check that the labels in the
3711 destination blocks have their address taken. */
3712 FOR_EACH_EDGE (e, ei, bb->succs)
3713 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3714 | EDGE_FALSE_VALUE))
3715 || !(e->flags & EDGE_ABNORMAL))
3716 {
3717 error ("wrong outgoing edge flags at end of bb %d",
3718 bb->index);
3719 err = 1;
3720 }
3721 }
3722 break;
3723
3724 case RETURN_EXPR:
3725 if (!single_succ_p (bb)
3726 || (single_succ_edge (bb)->flags
3727 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3728 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3729 {
3730 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3731 err = 1;
3732 }
3733 if (single_succ (bb) != EXIT_BLOCK_PTR)
3734 {
3735 error ("return edge does not point to exit in bb %d",
3736 bb->index);
3737 err = 1;
3738 }
3739 break;
3740
3741 case SWITCH_EXPR:
3742 {
3743 tree prev;
3744 edge e;
3745 size_t i, n;
3746 tree vec;
3747
3748 vec = SWITCH_LABELS (stmt);
3749 n = TREE_VEC_LENGTH (vec);
3750
3751 /* Mark all the destination basic blocks. */
3752 for (i = 0; i < n; ++i)
3753 {
3754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3755 basic_block label_bb = label_to_block (lab);
3756
3757 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3758 label_bb->aux = (void *)1;
3759 }
3760
3761 /* Verify that the case labels are sorted. */
3762 prev = TREE_VEC_ELT (vec, 0);
3763 for (i = 1; i < n - 1; ++i)
3764 {
3765 tree c = TREE_VEC_ELT (vec, i);
3766 if (! CASE_LOW (c))
3767 {
3768 error ("found default case not at end of case vector");
3769 err = 1;
3770 continue;
3771 }
3772 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3773 {
3774 error ("case labels not sorted:");
3775 print_generic_expr (stderr, prev, 0);
3776 fprintf (stderr," is greater than ");
3777 print_generic_expr (stderr, c, 0);
3778 fprintf (stderr," but comes before it.\n");
3779 err = 1;
3780 }
3781 prev = c;
3782 }
3783 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3784 {
3785 error ("no default case found at end of case vector");
3786 err = 1;
3787 }
3788
3789 FOR_EACH_EDGE (e, ei, bb->succs)
3790 {
3791 if (!e->dest->aux)
3792 {
3793 error ("extra outgoing edge %d->%d",
3794 bb->index, e->dest->index);
3795 err = 1;
3796 }
3797 e->dest->aux = (void *)2;
3798 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3799 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3800 {
3801 error ("wrong outgoing edge flags at end of bb %d",
3802 bb->index);
3803 err = 1;
3804 }
3805 }
3806
3807 /* Check that we have all of them. */
3808 for (i = 0; i < n; ++i)
3809 {
3810 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3811 basic_block label_bb = label_to_block (lab);
3812
3813 if (label_bb->aux != (void *)2)
3814 {
3815 error ("missing edge %i->%i",
3816 bb->index, label_bb->index);
3817 err = 1;
3818 }
3819 }
3820
3821 FOR_EACH_EDGE (e, ei, bb->succs)
3822 e->dest->aux = (void *)0;
3823 }
3824
3825 default: ;
3826 }
3827 }
3828
3829 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3830 verify_dominators (CDI_DOMINATORS);
3831
3832 return err;
3833 }
3834
3835
3836 /* Updates phi nodes after creating a forwarder block joined
3837 by edge FALLTHRU. */
3838
3839 static void
3840 tree_make_forwarder_block (edge fallthru)
3841 {
3842 edge e;
3843 edge_iterator ei;
3844 basic_block dummy, bb;
3845 tree phi, new_phi, var;
3846
3847 dummy = fallthru->src;
3848 bb = fallthru->dest;
3849
3850 if (single_pred_p (bb))
3851 return;
3852
3853 /* If we redirected a branch we must create new phi nodes at the
3854 start of BB. */
3855 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3856 {
3857 var = PHI_RESULT (phi);
3858 new_phi = create_phi_node (var, bb);
3859 SSA_NAME_DEF_STMT (var) = new_phi;
3860 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3861 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3862 }
3863
3864 /* Ensure that the PHI node chain is in the same order. */
3865 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3866
3867 /* Add the arguments we have stored on edges. */
3868 FOR_EACH_EDGE (e, ei, bb->preds)
3869 {
3870 if (e == fallthru)
3871 continue;
3872
3873 flush_pending_stmts (e);
3874 }
3875 }
3876
3877
3878 /* Return a non-special label in the head of basic block BB.
3879 Create one if it doesn't exist. */
3880
3881 tree
3882 tree_block_label (basic_block bb)
3883 {
3884 block_stmt_iterator i, s = bsi_start (bb);
3885 bool first = true;
3886 tree label, stmt;
3887
3888 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3889 {
3890 stmt = bsi_stmt (i);
3891 if (TREE_CODE (stmt) != LABEL_EXPR)
3892 break;
3893 label = LABEL_EXPR_LABEL (stmt);
3894 if (!DECL_NONLOCAL (label))
3895 {
3896 if (!first)
3897 bsi_move_before (&i, &s);
3898 return label;
3899 }
3900 }
3901
3902 label = create_artificial_label ();
3903 stmt = build1 (LABEL_EXPR, void_type_node, label);
3904 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3905 return label;
3906 }
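
/* A guarded-out sketch: build an explicit GOTO_EXPR that transfers control
   to basic block DEST, using (or creating) a label at its head.  This
   mirrors what disband_implicit_edges does for fallthru edges whose
   destination is no longer the next block.  The function name is
   hypothetical.  */
#if 0
static tree
example_build_goto_to_block (basic_block dest)
{
  tree label = tree_block_label (dest);
  return build1 (GOTO_EXPR, void_type_node, label);
}
#endif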
3907
3908
3909 /* Attempt to perform edge redirection by replacing a possibly complex
3910 jump instruction by a goto or by removing the jump completely.
3911 This can apply only if all edges now point to the same block. The
3912 parameters and return values are equivalent to
3913 redirect_edge_and_branch. */
3914
3915 static edge
3916 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
3917 {
3918 basic_block src = e->src;
3919 block_stmt_iterator b;
3920 tree stmt;
3921
3922 /* We can replace or remove a complex jump only when we have exactly
3923 two edges. */
3924 if (EDGE_COUNT (src->succs) != 2
3925 /* Verify that all targets will be TARGET. Specifically, the
3926 edge that is not E must also go to TARGET. */
3927 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
3928 return NULL;
3929
3930 b = bsi_last (src);
3931 if (bsi_end_p (b))
3932 return NULL;
3933 stmt = bsi_stmt (b);
3934
3935 if (TREE_CODE (stmt) == COND_EXPR
3936 || TREE_CODE (stmt) == SWITCH_EXPR)
3937 {
3938 bsi_remove (&b);
3939 e = ssa_redirect_edge (e, target);
3940 e->flags = EDGE_FALLTHRU;
3941 return e;
3942 }
3943
3944 return NULL;
3945 }
3946
3947
3948 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
3949 edge representing the redirected branch. */
3950
3951 static edge
3952 tree_redirect_edge_and_branch (edge e, basic_block dest)
3953 {
3954 basic_block bb = e->src;
3955 block_stmt_iterator bsi;
3956 edge ret;
3957 tree label, stmt;
3958
3959 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3960 return NULL;
3961
3962 if (e->src != ENTRY_BLOCK_PTR
3963 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
3964 return ret;
3965
3966 if (e->dest == dest)
3967 return NULL;
3968
3969 label = tree_block_label (dest);
3970
3971 bsi = bsi_last (bb);
3972 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
3973
3974 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
3975 {
3976 case COND_EXPR:
3977 stmt = (e->flags & EDGE_TRUE_VALUE
3978 ? COND_EXPR_THEN (stmt)
3979 : COND_EXPR_ELSE (stmt));
3980 GOTO_DESTINATION (stmt) = label;
3981 break;
3982
3983 case GOTO_EXPR:
3984 /* No non-abnormal edges should lead from a non-simple goto, and
3985 simple ones should be represented implicitly. */
3986 gcc_unreachable ();
3987
3988 case SWITCH_EXPR:
3989 {
3990 tree cases = get_cases_for_edge (e, stmt);
3991
3992 /* If we have a list of cases associated with E, then use it
3993 as it's a lot faster than walking the entire case vector. */
3994 if (cases)
3995 {
3996 edge e2 = find_edge (e->src, dest);
3997 tree last, first;
3998
3999 first = cases;
4000 while (cases)
4001 {
4002 last = cases;
4003 CASE_LABEL (cases) = label;
4004 cases = TREE_CHAIN (cases);
4005 }
4006
4007 /* If there was already an edge in the CFG, then we need
4008 to move all the cases associated with E to E2. */
4009 if (e2)
4010 {
4011 tree cases2 = get_cases_for_edge (e2, stmt);
4012
4013 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4014 TREE_CHAIN (cases2) = first;
4015 }
4016 }
4017 else
4018 {
4019 tree vec = SWITCH_LABELS (stmt);
4020 size_t i, n = TREE_VEC_LENGTH (vec);
4021
4022 for (i = 0; i < n; i++)
4023 {
4024 tree elt = TREE_VEC_ELT (vec, i);
4025
4026 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4027 CASE_LABEL (elt) = label;
4028 }
4029 }
4030
4031 break;
4032 }
4033
4034 case RETURN_EXPR:
4035 bsi_remove (&bsi);
4036 e->flags |= EDGE_FALLTHRU;
4037 break;
4038
4039 default:
4040 /* Otherwise it must be a fallthru edge, and we don't need to
4041 do anything besides redirecting it. */
4042 gcc_assert (e->flags & EDGE_FALLTHRU);
4043 break;
4044 }
4045
4046 /* Update/insert PHI nodes as necessary. */
4047
4048 /* Now update the edges in the CFG. */
4049 e = ssa_redirect_edge (e, dest);
4050
4051 return e;
4052 }
4053
4054
4055 /* Simple wrapper, as we can always redirect fallthru edges. */
4056
4057 static basic_block
4058 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4059 {
4060 e = tree_redirect_edge_and_branch (e, dest);
4061 gcc_assert (e);
4062
4063 return NULL;
4064 }
4065
4066
4067 /* Splits basic block BB after statement STMT (but at least after the
4068 labels). If STMT is NULL, BB is split just after the labels. */
4069
4070 static basic_block
4071 tree_split_block (basic_block bb, void *stmt)
4072 {
4073 block_stmt_iterator bsi, bsi_tgt;
4074 tree act;
4075 basic_block new_bb;
4076 edge e;
4077 edge_iterator ei;
4078
4079 new_bb = create_empty_bb (bb);
4080
4081 /* Redirect the outgoing edges. */
4082 new_bb->succs = bb->succs;
4083 bb->succs = NULL;
4084 FOR_EACH_EDGE (e, ei, new_bb->succs)
4085 e->src = new_bb;
4086
4087 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4088 stmt = NULL;
4089
4090 /* Move everything from BSI to the new basic block. */
4091 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4092 {
4093 act = bsi_stmt (bsi);
4094 if (TREE_CODE (act) == LABEL_EXPR)
4095 continue;
4096
4097 if (!stmt)
4098 break;
4099
4100 if (stmt == act)
4101 {
4102 bsi_next (&bsi);
4103 break;
4104 }
4105 }
4106
4107 bsi_tgt = bsi_start (new_bb);
4108 while (!bsi_end_p (bsi))
4109 {
4110 act = bsi_stmt (bsi);
4111 bsi_remove (&bsi);
4112 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4113 }
4114
4115 return new_bb;
4116 }
4117
4118
4119 /* Moves basic block BB after block AFTER. */
4120
4121 static bool
4122 tree_move_block_after (basic_block bb, basic_block after)
4123 {
4124 if (bb->prev_bb == after)
4125 return true;
4126
4127 unlink_block (bb);
4128 link_block (bb, after);
4129
4130 return true;
4131 }
4132
4133
4134 /* Return true if basic block BB can be duplicated. */
4135
4136 static bool
4137 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4138 {
4139 return true;
4140 }
4141
4142
4143 /* Create a duplicate of the basic block BB. NOTE: This does not
4144 preserve SSA form. */
4145
4146 static basic_block
4147 tree_duplicate_bb (basic_block bb)
4148 {
4149 basic_block new_bb;
4150 block_stmt_iterator bsi, bsi_tgt;
4151 tree phi;
4152
4153 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4154
4155 /* Copy the PHI nodes. We ignore PHI node arguments here because
4156 the incoming edges have not been set up yet. */
4157 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4158 {
4159 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4160 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4161 }
4162
4163 /* Keep the chain of PHI nodes in the same order so that they can be
4164 updated by ssa_redirect_edge. */
4165 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4166
4167 bsi_tgt = bsi_start (new_bb);
4168 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4169 {
4170 def_operand_p def_p;
4171 ssa_op_iter op_iter;
4172 tree stmt, copy;
4173 int region;
4174
4175 stmt = bsi_stmt (bsi);
4176 if (TREE_CODE (stmt) == LABEL_EXPR)
4177 continue;
4178
4179 /* Create a new copy of STMT and duplicate STMT's virtual
4180 operands. */
4181 copy = unshare_expr (stmt);
4182 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4183 copy_virtual_operands (copy, stmt);
4184 region = lookup_stmt_eh_region (stmt);
4185 if (region >= 0)
4186 add_stmt_to_eh_region (copy, region);
4187
4188 /* Create new names for all the definitions created by COPY and
4189 add replacement mappings for each new name. */
4190 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4191 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4192 }
4193
4194 return new_bb;
4195 }
4196
4197
4198 /* Basic block BB_COPY was created by code duplication. Add phi node
4199 arguments for edges going out of BB_COPY. The blocks that were
4200 duplicated have BB_DUPLICATED set. */
4201
4202 void
4203 add_phi_args_after_copy_bb (basic_block bb_copy)
4204 {
4205 basic_block bb, dest;
4206 edge e, e_copy;
4207 edge_iterator ei;
4208 tree phi, phi_copy, phi_next, def;
4209
4210 bb = get_bb_original (bb_copy);
4211
4212 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4213 {
4214 if (!phi_nodes (e_copy->dest))
4215 continue;
4216
4217 if (e_copy->dest->flags & BB_DUPLICATED)
4218 dest = get_bb_original (e_copy->dest);
4219 else
4220 dest = e_copy->dest;
4221
4222 e = find_edge (bb, dest);
4223 if (!e)
4224 {
4225 /* During loop unrolling the target of the latch edge is copied.
4226 In this case we are not looking for the edge to DEST, but for
4227 the edge to the duplicated block whose original was DEST. */
4228 FOR_EACH_EDGE (e, ei, bb->succs)
4229 if ((e->dest->flags & BB_DUPLICATED)
4230 && get_bb_original (e->dest) == dest)
4231 break;
4232
4233 gcc_assert (e != NULL);
4234 }
4235
4236 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4237 phi;
4238 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4239 {
4240 phi_next = PHI_CHAIN (phi);
4241 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4242 add_phi_arg (phi_copy, def, e_copy);
4243 }
4244 }
4245 }
4246
4247 /* Blocks in REGION_COPY array of length N_REGION were created by
4248 duplication of basic blocks. Add phi node arguments for edges
4249 going from these blocks. */
4250
4251 void
4252 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4253 {
4254 unsigned i;
4255
4256 for (i = 0; i < n_region; i++)
4257 region_copy[i]->flags |= BB_DUPLICATED;
4258
4259 for (i = 0; i < n_region; i++)
4260 add_phi_args_after_copy_bb (region_copy[i]);
4261
4262 for (i = 0; i < n_region; i++)
4263 region_copy[i]->flags &= ~BB_DUPLICATED;
4264 }
4265
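/* Illustrative sketch (not part of this file): the intended calling
   sequence is to let copy_bbs fill REGION_COPY and then fix up the PHI
   arguments afterwards, roughly

       copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy,
                 loop, split_edge_bb_loc (entry));
       add_phi_args_after_copy (region_copy, n_region);

   tree_duplicate_sese_region below is the canonical caller.  */
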
4266 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4267 important exit edge EXIT. By important we mean that no SSA name defined
4268 inside the region is live over the other exit edges of the region. All
4269 entry edges to the region must go to ENTRY->dest. The edge ENTRY is
4270 redirected to the duplicate of the region. SSA form, dominance and loop
4271 information are updated. The new basic blocks are stored in REGION_COPY
4272 in the same order as they had in REGION, provided that REGION_COPY is
4273 not NULL. The function returns false if it is unable to copy the
4274 region, true otherwise. */
4275
4276 bool
4277 tree_duplicate_sese_region (edge entry, edge exit,
4278 basic_block *region, unsigned n_region,
4279 basic_block *region_copy)
4280 {
4281 unsigned i, n_doms;
4282 bool free_region_copy = false, copying_header = false;
4283 struct loop *loop = entry->dest->loop_father;
4284 edge exit_copy;
4285 basic_block *doms;
4286 edge redirected;
4287 int total_freq = 0, entry_freq = 0;
4288 gcov_type total_count = 0, entry_count = 0;
4289
4290 if (!can_copy_bbs_p (region, n_region))
4291 return false;
4292
4293 /* Some sanity checking. Note that we do not check for all possible
4294 misuses of the function. I.e. if you ask to copy something weird,
4295 it will work, but the state of the structures probably will not be
4296 correct. */
4297 for (i = 0; i < n_region; i++)
4298 {
4299 /* We do not handle subloops, i.e. all the blocks must belong to the
4300 same loop. */
4301 if (region[i]->loop_father != loop)
4302 return false;
4303
4304 if (region[i] != entry->dest
4305 && region[i] == loop->header)
4306 return false;
4307 }
4308
4309 loop->copy = loop;
4310
4311 /* In case the function is used for loop header copying (which is the primary
4312 use), ensure that EXIT and its copy will be the new latch and entry edges. */
4313 if (loop->header == entry->dest)
4314 {
4315 copying_header = true;
4316 loop->copy = loop->outer;
4317
4318 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4319 return false;
4320
4321 for (i = 0; i < n_region; i++)
4322 if (region[i] != exit->src
4323 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4324 return false;
4325 }
4326
4327 if (!region_copy)
4328 {
4329 region_copy = xmalloc (sizeof (basic_block) * n_region);
4330 free_region_copy = true;
4331 }
4332
4333 gcc_assert (!need_ssa_update_p ());
4334
4335 /* Record blocks outside the region that are dominated by something
4336 inside. */
4337 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4338 initialize_original_copy_tables ();
4339
4340 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4341
4342 if (entry->dest->count)
4343 {
4344 total_count = entry->dest->count;
4345 entry_count = entry->count;
4346 /* Fix up corner cases, to avoid division by zero or creation of negative
4347 frequencies. */
4348 if (entry_count > total_count)
4349 entry_count = total_count;
4350 }
4351 else
4352 {
4353 total_freq = entry->dest->frequency;
4354 entry_freq = EDGE_FREQUENCY (entry);
4355 /* Fix up corner cases, to avoid division by zero or creation of negative
4356 frequencies. */
4357 if (total_freq == 0)
4358 total_freq = 1;
4359 else if (entry_freq > total_freq)
4360 entry_freq = total_freq;
4361 }
4362
4363 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4364 split_edge_bb_loc (entry));
4365 if (total_count)
4366 {
4367 scale_bbs_frequencies_gcov_type (region, n_region,
4368 total_count - entry_count,
4369 total_count);
4370 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4371 total_count);
4372 }
4373 else
4374 {
4375 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4376 total_freq);
4377 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4378 }
4379
4380 if (copying_header)
4381 {
4382 loop->header = exit->dest;
4383 loop->latch = exit->src;
4384 }
4385
4386 /* Redirect the entry and add the phi node arguments. */
4387 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4388 gcc_assert (redirected != NULL);
4389 flush_pending_stmts (entry);
4390
4391 /* Concerning updating of dominators: we must recount dominators
4392 for the entry block and its copy. Anything outside of the
4393 region that was dominated by something inside needs recounting as
4394 well. */
4395 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4396 doms[n_doms++] = get_bb_original (entry->dest);
4397 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4398 free (doms);
4399
4400 /* Add the other PHI node arguments. */
4401 add_phi_args_after_copy (region_copy, n_region);
4402
4403 /* Update the SSA web. */
4404 update_ssa (TODO_update_ssa);
4405
4406 if (free_region_copy)
4407 free (region_copy);
4408
4409 free_original_copy_tables ();
4410 return true;
4411 }
4412
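/* Illustrative sketch (assumption, not part of this file): a caller that
   has collected the blocks of a single-entry region into BBS[0..N-1],
   with ENTRY the unique entry edge and EXIT the important exit edge,
   would duplicate it with

       if (tree_duplicate_sese_region (entry, exit, bbs, n, NULL))
         ...;    the region was copied and SSA form has been updated

   Passing NULL for REGION_COPY lets the function allocate and free the
   scratch array itself.  */
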
4413
4414 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h). */
4415
4416 void
4417 dump_function_to_file (tree fn, FILE *file, int flags)
4418 {
4419 tree arg, vars, var;
4420 bool ignore_topmost_bind = false, any_var = false;
4421 basic_block bb;
4422 tree chain;
4423
4424 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4425
4426 arg = DECL_ARGUMENTS (fn);
4427 while (arg)
4428 {
4429 print_generic_expr (file, arg, dump_flags);
4430 if (TREE_CHAIN (arg))
4431 fprintf (file, ", ");
4432 arg = TREE_CHAIN (arg);
4433 }
4434 fprintf (file, ")\n");
4435
4436 if (flags & TDF_DETAILS)
4437 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4438 if (flags & TDF_RAW)
4439 {
4440 dump_node (fn, TDF_SLIM | flags, file);
4441 return;
4442 }
4443
4444 /* When GIMPLE is lowered, the variables are no longer available in
4445 BIND_EXPRs, so display them separately. */
4446 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4447 {
4448 ignore_topmost_bind = true;
4449
4450 fprintf (file, "{\n");
4451 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4452 {
4453 var = TREE_VALUE (vars);
4454
4455 print_generic_decl (file, var, flags);
4456 fprintf (file, "\n");
4457
4458 any_var = true;
4459 }
4460 }
4461
4462 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4463 {
4464 /* Make a CFG based dump. */
4465 check_bb_profile (ENTRY_BLOCK_PTR, file);
4466 if (!ignore_topmost_bind)
4467 fprintf (file, "{\n");
4468
4469 if (any_var && n_basic_blocks)
4470 fprintf (file, "\n");
4471
4472 FOR_EACH_BB (bb)
4473 dump_generic_bb (file, bb, 2, flags);
4474
4475 fprintf (file, "}\n");
4476 check_bb_profile (EXIT_BLOCK_PTR, file);
4477 }
4478 else
4479 {
4480 int indent;
4481
4482 /* Make a tree based dump. */
4483 chain = DECL_SAVED_TREE (fn);
4484
4485 if (TREE_CODE (chain) == BIND_EXPR)
4486 {
4487 if (ignore_topmost_bind)
4488 {
4489 chain = BIND_EXPR_BODY (chain);
4490 indent = 2;
4491 }
4492 else
4493 indent = 0;
4494 }
4495 else
4496 {
4497 if (!ignore_topmost_bind)
4498 fprintf (file, "{\n");
4499 indent = 2;
4500 }
4501
4502 if (any_var)
4503 fprintf (file, "\n");
4504
4505 print_generic_stmt_indented (file, chain, flags, indent);
4506 if (ignore_topmost_bind)
4507 fprintf (file, "}\n");
4508 }
4509
4510 fprintf (file, "\n\n");
4511 }
4512
4513
4514 /* Pretty print the intermediate representation of the loops. */
4515 static void print_loop (FILE *, struct loop *, int);
4516 static void print_pred_bbs (FILE *, basic_block bb);
4517 static void print_succ_bbs (FILE *, basic_block bb);
4518
4519
4520 /* Print on FILE the indexes for the predecessors of basic_block BB. */
4521
4522 static void
4523 print_pred_bbs (FILE *file, basic_block bb)
4524 {
4525 edge e;
4526 edge_iterator ei;
4527
4528 FOR_EACH_EDGE (e, ei, bb->preds)
4529 fprintf (file, "bb_%d ", e->src->index);
4530 }
4531
4532
4533 /* Print on FILE the indexes for the successors of basic_block BB. */
4534
4535 static void
4536 print_succ_bbs (FILE *file, basic_block bb)
4537 {
4538 edge e;
4539 edge_iterator ei;
4540
4541 FOR_EACH_EDGE (e, ei, bb->succs)
4542 fprintf (file, "bb_%d ", e->dest->index);
4543 }
4544
4545
4546 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4547
4548 static void
4549 print_loop (FILE *file, struct loop *loop, int indent)
4550 {
4551 char *s_indent;
4552 basic_block bb;
4553
4554 if (loop == NULL)
4555 return;
4556
4557 s_indent = (char *) alloca ((size_t) indent + 1);
4558 memset ((void *) s_indent, ' ', (size_t) indent);
4559 s_indent[indent] = '\0';
4560
4561 /* Print the loop's header. */
4562 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4563
4564 /* Print the loop's body. */
4565 fprintf (file, "%s{\n", s_indent);
4566 FOR_EACH_BB (bb)
4567 if (bb->loop_father == loop)
4568 {
4569 /* Print the basic_block's header. */
4570 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
4571 print_pred_bbs (file, bb);
4572 fprintf (file, "}, succs = {");
4573 print_succ_bbs (file, bb);
4574 fprintf (file, "})\n");
4575
4576 /* Print the basic_block's body. */
4577 fprintf (file, "%s {\n", s_indent);
4578 tree_dump_bb (bb, file, indent + 4);
4579 fprintf (file, "%s }\n", s_indent);
4580 }
4581
4582 print_loop (file, loop->inner, indent + 2);
4583 fprintf (file, "%s}\n", s_indent);
4584 print_loop (file, loop->next, indent);
4585 }
4586
4587
4588 /* Starting from the first basic block of the current function, pretty
4589 print its enclosing loop structure on FILE. */
4590
4591 void
4592 print_loop_ir (FILE *file)
4593 {
4594 basic_block bb;
4595
4596 bb = BASIC_BLOCK (0);
4597 if (bb && bb->loop_father)
4598 print_loop (file, bb->loop_father, 0);
4599 }
4600
4601
4602 /* Debug the loop structure at the tree level. */
4603
4604 void
4605 debug_loop_ir (void)
4606 {
4607 print_loop_ir (stderr);
4608 }
4609
4610
4611 /* Return true if BB ends with a call, possibly followed by some
4612 instructions that must stay with the call. Return false
4613 otherwise. */
4614
4615 static bool
4616 tree_block_ends_with_call_p (basic_block bb)
4617 {
4618 block_stmt_iterator bsi = bsi_last (bb);
4619 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4620 }
4621
4622
4623 /* Return true if BB ends with a conditional branch. Return false
4624 otherwise. */
4625
4626 static bool
4627 tree_block_ends_with_condjump_p (basic_block bb)
4628 {
4629 tree stmt = last_stmt (bb);
4630 return (stmt && TREE_CODE (stmt) == COND_EXPR);
4631 }
4632
4633
4634 /* Return true if we need to add a fake edge to the exit at statement T.
4635 Helper function for tree_flow_call_edges_add. */
4636
4637 static bool
4638 need_fake_edge_p (tree t)
4639 {
4640 tree call;
4641
4642 /* NORETURN and LONGJMP calls already have an edge to exit.
4643 CONST and PURE calls do not need one.
4644 We don't currently check for CONST and PURE here, although
4645 it would be a good idea, because those attributes are
4646 figured out from the RTL in mark_constant_function, and
4647 the counter incrementation code from -fprofile-arcs
4648 leads to different results from -fbranch-probabilities. */
4649 call = get_call_expr_in (t);
4650 if (call
4651 && !(call_expr_flags (call) & ECF_NORETURN))
4652 return true;
4653
4654 if (TREE_CODE (t) == ASM_EXPR
4655 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4656 return true;
4657
4658 return false;
4659 }
4660
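/* Illustrative examples (not part of this file): need_fake_edge_p
   returns true for an ordinary call such as

       foo ();

   because foo might never return control here, and for a volatile asm
   such as

       __asm__ __volatile__ ("");

   but false for a NORETURN call, which already has an edge to exit.  */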
4661
4662 /* Add fake edges to the function exit for any non-constant, non-noreturn
4663 calls and volatile inline assembly in the bitmap of blocks specified by
4664 BLOCKS, or in the whole CFG if BLOCKS is zero. Return
4665 the number of blocks that were split.
4666
4667 The goal is to expose cases in which entering a basic block does
4668 not imply that all subsequent instructions must be executed. */
4669
4670 static int
4671 tree_flow_call_edges_add (sbitmap blocks)
4672 {
4673 int i;
4674 int blocks_split = 0;
4675 int last_bb = last_basic_block;
4676 bool check_last_block = false;
4677
4678 if (n_basic_blocks == 0)
4679 return 0;
4680
4681 if (! blocks)
4682 check_last_block = true;
4683 else
4684 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4685
4686 /* In the last basic block, before epilogue generation, there will be
4687 a fallthru edge to EXIT. Special care is required if the last insn
4688 of the last basic block is a call because make_edge folds duplicate
4689 edges, which would result in the fallthru edge also being marked
4690 fake, which would result in the fallthru edge being removed by
4691 remove_fake_edges, which would result in an invalid CFG.
4692
4693 Moreover, we can't elide the outgoing fake edge, since the block
4694 profiler needs to take this into account in order to solve the minimal
4695 spanning tree in the case that the call doesn't return.
4696
4697 Handle this by adding a dummy instruction in a new last basic block. */
4698 if (check_last_block)
4699 {
4700 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4701 block_stmt_iterator bsi = bsi_last (bb);
4702 tree t = NULL_TREE;
4703 if (!bsi_end_p (bsi))
4704 t = bsi_stmt (bsi);
4705
4706 if (need_fake_edge_p (t))
4707 {
4708 edge e;
4709
4710 e = find_edge (bb, EXIT_BLOCK_PTR);
4711 if (e)
4712 {
4713 bsi_insert_on_edge (e, build_empty_stmt ());
4714 bsi_commit_edge_inserts ();
4715 }
4716 }
4717 }
4718
4719 /* Now add fake edges to the function exit for any non-constant
4720 calls, since there is no way that we can determine whether they
4721 will return or not. */
4722 for (i = 0; i < last_bb; i++)
4723 {
4724 basic_block bb = BASIC_BLOCK (i);
4725 block_stmt_iterator bsi;
4726 tree stmt, last_stmt;
4727
4728 if (!bb)
4729 continue;
4730
4731 if (blocks && !TEST_BIT (blocks, i))
4732 continue;
4733
4734 bsi = bsi_last (bb);
4735 if (!bsi_end_p (bsi))
4736 {
4737 last_stmt = bsi_stmt (bsi);
4738 do
4739 {
4740 stmt = bsi_stmt (bsi);
4741 if (need_fake_edge_p (stmt))
4742 {
4743 edge e;
4744 /* The handling above of the final block before the
4745 epilogue should be enough to verify that there is
4746 no edge to the exit block in the CFG already.
4747 Calling make_edge in such a case would cause us to
4748 mark that edge as fake and remove it later. */
4749 #ifdef ENABLE_CHECKING
4750 if (stmt == last_stmt)
4751 {
4752 e = find_edge (bb, EXIT_BLOCK_PTR);
4753 gcc_assert (e == NULL);
4754 }
4755 #endif
4756
4757 /* Note that the following may create a new basic block
4758 and renumber the existing basic blocks. */
4759 if (stmt != last_stmt)
4760 {
4761 e = split_block (bb, stmt);
4762 if (e)
4763 blocks_split++;
4764 }
4765 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4766 }
4767 bsi_prev (&bsi);
4768 }
4769 while (!bsi_end_p (bsi));
4770 }
4771 }
4772
4773 if (blocks_split)
4774 verify_flow_info ();
4775
4776 return blocks_split;
4777 }
4778
4779 bool
4780 tree_purge_dead_eh_edges (basic_block bb)
4781 {
4782 bool changed = false;
4783 edge e;
4784 edge_iterator ei;
4785 tree stmt = last_stmt (bb);
4786
4787 if (stmt && tree_can_throw_internal (stmt))
4788 return false;
4789
4790 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4791 {
4792 if (e->flags & EDGE_EH)
4793 {
4794 remove_edge (e);
4795 changed = true;
4796 }
4797 else
4798 ei_next (&ei);
4799 }
4800
4801 /* Removal of dead EH edges might change the dominators of more than
4802 just the immediate successors. E.g. when bb1 is changed so that
4803 it can no longer throw and bb1->bb3 and bb1->bb4 are the dead
4804 EH edges purged by this function in:
4805 0
4806 / \
4807 v v
4808 1-->2
4809 / \ |
4810 v v |
4811 3-->4 |
4812 \ v
4813 --->5
4814 |
4815 -
4816 idom(bb5) must be recomputed. For now just free the dominance
4817 info. */
4818 if (changed)
4819 free_dominance_info (CDI_DOMINATORS);
4820
4821 return changed;
4822 }
4823
4824 bool
4825 tree_purge_all_dead_eh_edges (bitmap blocks)
4826 {
4827 bool changed = false;
4828 unsigned i;
4829 bitmap_iterator bi;
4830
4831 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
4832 {
4833 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
4834 }
4835
4836 return changed;
4837 }
4838
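/* Illustrative sketch (not part of this file): a caller that has noted
   which blocks may have stopped throwing typically records their
   indices in a bitmap and cleans them all up at once, e.g.

       bitmap to_clean = BITMAP_ALLOC (NULL);
       bitmap_set_bit (to_clean, bb->index);
       ...
       if (tree_purge_all_dead_eh_edges (to_clean))
         ...;    some EH edges were removed; dominance info was freed
       BITMAP_FREE (to_clean);  */
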
4839 /* This function is called whenever a new edge is created or
4840 redirected. */
4841
4842 static void
4843 tree_execute_on_growing_pred (edge e)
4844 {
4845 basic_block bb = e->dest;
4846
4847 if (phi_nodes (bb))
4848 reserve_phi_args_for_new_edge (bb);
4849 }
4850
4851 /* This function is called immediately before edge E is removed from
4852 the edge vector E->dest->preds. */
4853
4854 static void
4855 tree_execute_on_shrinking_pred (edge e)
4856 {
4857 if (phi_nodes (e->dest))
4858 remove_phi_args (e);
4859 }
4860
4861 /*---------------------------------------------------------------------------
4862 Helper functions for loop versioning
4863 ---------------------------------------------------------------------------*/
4864
4865 /* Adjust PHI nodes for basic block FIRST. Basic block SECOND is a copy
4866 of FIRST, and both of them are dominated by basic block NEW_HEAD. When
4867 NEW_HEAD was created by splitting SECOND's incoming edge, the edge from
4868 NEW_HEAD to SECOND received PHI arguments from split_edge(). Later, an
4869 additional edge E was created to connect NEW_HEAD and FIRST. This
4870 routine now adds to edge E the PHI arguments that the NEW_HEAD to SECOND
4871 edge received as part of that edge splitting.
4872 */
4873
4874 static void
4875 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
4876 basic_block new_head, edge e)
4877 {
4878 tree phi1, phi2;
4879 edge e2 = find_edge (new_head, second);
4880
4881 /* Because NEW_HEAD has been created by splitting SECOND's incoming
4882 edge, we should always have an edge from NEW_HEAD to SECOND. */
4883 gcc_assert (e2 != NULL);
4884
4885 /* Walk all the PHI nodes of basic block SECOND and add PHI args to
4886 edge E for the FIRST head. The PHI args are always in the correct order. */
4887
4888 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
4889 phi2 && phi1;
4890 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
4891 {
4892 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
4893 add_phi_arg (phi1, def, e);
4894 }
4895 }
4896
4897 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
4898 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
4899 the destination of the ELSE part. */
4900 static void
4901 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
4902 basic_block cond_bb, void *cond_e)
4903 {
4904 block_stmt_iterator bsi;
4905 tree goto1 = NULL_TREE;
4906 tree goto2 = NULL_TREE;
4907 tree new_cond_expr = NULL_TREE;
4908 tree cond_expr = (tree) cond_e;
4909 edge e0;
4910
4911 /* Build the new conditional expression. */
4912 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
4913 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
4914 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
4915
4916 /* Add the new condition to COND_BB. */
4917 bsi = bsi_start (cond_bb);
4918 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
4919 /* Adjust the edges appropriately to connect the new head with the
4920 first head as well as the second head. */
4921 e0 = single_succ_edge (cond_bb);
4922 e0->flags &= ~EDGE_FALLTHRU;
4923 e0->flags |= EDGE_FALSE_VALUE;
4924 }
4925
4926 struct cfg_hooks tree_cfg_hooks = {
4927 "tree",
4928 tree_verify_flow_info,
4929 tree_dump_bb, /* dump_bb */
4930 create_bb, /* create_basic_block */
4931 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
4932 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
4933 remove_bb, /* delete_basic_block */
4934 tree_split_block, /* split_block */
4935 tree_move_block_after, /* move_block_after */
4936 tree_can_merge_blocks_p, /* can_merge_blocks_p */
4937 tree_merge_blocks, /* merge_blocks */
4938 tree_predict_edge, /* predict_edge */
4939 tree_predicted_by_p, /* predicted_by_p */
4940 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
4941 tree_duplicate_bb, /* duplicate_block */
4942 tree_split_edge, /* split_edge */
4943 tree_make_forwarder_block, /* make_forwarder_block */
4944 NULL, /* tidy_fallthru_edge */
4945 tree_block_ends_with_call_p, /* block_ends_with_call_p */
4946 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4947 tree_flow_call_edges_add, /* flow_call_edges_add */
4948 tree_execute_on_growing_pred, /* execute_on_growing_pred */
4949 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
4950 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
4951 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4952 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
4953 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
4954 flush_pending_stmts /* flush_pending_stmts */
4955 };
4956
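/* Illustrative note (not part of this file): generic code does not call
   the tree_* routines above directly; once this table has been
   installed by cfghooks.c, hook wrappers such as

       split_edge (e);
       redirect_edge_and_branch (e, dest);

   dispatch to tree_split_edge and tree_redirect_edge_and_branch.  */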
4957
4958 /* Split all critical edges. */
4959
4960 static void
4961 split_critical_edges (void)
4962 {
4963 basic_block bb;
4964 edge e;
4965 edge_iterator ei;
4966
4967 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
4968 expensive. So we want to enable recording of edge-to-CASE_LABEL_EXPR
4969 mappings around the calls to split_edge. */
4970 start_recording_case_labels ();
4971 FOR_ALL_BB (bb)
4972 {
4973 FOR_EACH_EDGE (e, ei, bb->succs)
4974 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
4975 {
4976 split_edge (e);
4977 }
4978 }
4979 end_recording_case_labels ();
4980 }
4981
4982 struct tree_opt_pass pass_split_crit_edges =
4983 {
4984 "crited", /* name */
4985 NULL, /* gate */
4986 split_critical_edges, /* execute */
4987 NULL, /* sub */
4988 NULL, /* next */
4989 0, /* static_pass_number */
4990 TV_TREE_SPLIT_EDGES, /* tv_id */
4991 PROP_cfg, /* properties_required */
4992 PROP_no_crit_edges, /* properties_provided */
4993 0, /* properties_destroyed */
4994 0, /* todo_flags_start */
4995 TODO_dump_func, /* todo_flags_finish */
4996 0 /* letter */
4997 };
4998
4999 \f
5000 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5001 a temporary, making sure it is registered to be renamed if necessary,
5002 and finally return the temporary. Put the statements to compute
5003 EXP before the current statement in BSI. */
5004
5005 tree
5006 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5007 {
5008 tree t, new_stmt, orig_stmt;
5009
5010 if (is_gimple_val (exp))
5011 return exp;
5012
5013 t = make_rename_temp (type, NULL);
5014 new_stmt = build (MODIFY_EXPR, type, t, exp);
5015
5016 orig_stmt = bsi_stmt (*bsi);
5017 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5018 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5019
5020 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5021
5022 return t;
5023 }
5024
5025 /* Build a ternary operation and gimplify it. Emit code before BSI.
5026 Return the gimple_val holding the result. */
5027
5028 tree
5029 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5030 tree type, tree a, tree b, tree c)
5031 {
5032 tree ret;
5033
5034 ret = fold_build3 (code, type, a, b, c);
5035 STRIP_NOPS (ret);
5036
5037 return gimplify_val (bsi, type, ret);
5038 }
5039
5040 /* Build a binary operation and gimplify it. Emit code before BSI.
5041 Return the gimple_val holding the result. */
5042
5043 tree
5044 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5045 tree type, tree a, tree b)
5046 {
5047 tree ret;
5048
5049 ret = fold_build2 (code, type, a, b);
5050 STRIP_NOPS (ret);
5051
5052 return gimplify_val (bsi, type, ret);
5053 }
5054
5055 /* Build a unary operation and gimplify it. Emit code before BSI.
5056 Return the gimple_val holding the result. */
5057
5058 tree
5059 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5060 tree a)
5061 {
5062 tree ret;
5063
5064 ret = fold_build1 (code, type, a);
5065 STRIP_NOPS (ret);
5066
5067 return gimplify_val (bsi, type, ret);
5068 }
5069
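/* Illustrative sketch (not part of this file): a pass that needs the
   GIMPLE value of A + B inserted before the statement at *BSI could
   write

       tree sum = gimplify_build2 (&bsi, PLUS_EXPR, type, a, b);

   SUM is either already a GIMPLE value (e.g. a folded constant) or a
   fresh temporary whose defining statement gimplify_val inserted
   before *BSI.  */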
5070
5071 \f
5072 /* Emit return warnings. */
5073
5074 static void
5075 execute_warn_function_return (void)
5076 {
5077 #ifdef USE_MAPPED_LOCATION
5078 source_location location;
5079 #else
5080 location_t *locus;
5081 #endif
5082 tree last;
5083 edge e;
5084 edge_iterator ei;
5085
5086 /* If we have a path to EXIT, then we do return. */
5087 if (TREE_THIS_VOLATILE (cfun->decl)
5088 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5089 {
5090 #ifdef USE_MAPPED_LOCATION
5091 location = UNKNOWN_LOCATION;
5092 #else
5093 locus = NULL;
5094 #endif
5095 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5096 {
5097 last = last_stmt (e->src);
5098 if (TREE_CODE (last) == RETURN_EXPR
5099 #ifdef USE_MAPPED_LOCATION
5100 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5101 #else
5102 && (locus = EXPR_LOCUS (last)) != NULL)
5103 #endif
5104 break;
5105 }
5106 #ifdef USE_MAPPED_LOCATION
5107 if (location == UNKNOWN_LOCATION)
5108 location = cfun->function_end_locus;
5109 warning (0, "%H%<noreturn%> function does return", &location);
5110 #else
5111 if (!locus)
5112 locus = &cfun->function_end_locus;
5113 warning (0, "%H%<noreturn%> function does return", locus);
5114 #endif
5115 }
5116
5117 /* If we see "return;" in some basic block, then we do reach the end
5118 without returning a value. */
5119 else if (warn_return_type
5120 && !TREE_NO_WARNING (cfun->decl)
5121 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5122 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5123 {
5124 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5125 {
5126 tree last = last_stmt (e->src);
5127 if (TREE_CODE (last) == RETURN_EXPR
5128 && TREE_OPERAND (last, 0) == NULL)
5129 {
5130 #ifdef USE_MAPPED_LOCATION
5131 location = EXPR_LOCATION (last);
5132 if (location == UNKNOWN_LOCATION)
5133 location = cfun->function_end_locus;
5134 warning (0, "%Hcontrol reaches end of non-void function", &location);
5135 #else
5136 locus = EXPR_LOCUS (last);
5137 if (!locus)
5138 locus = &cfun->function_end_locus;
5139 warning (0, "%Hcontrol reaches end of non-void function", locus);
5140 #endif
5141 TREE_NO_WARNING (cfun->decl) = 1;
5142 break;
5143 }
5144 }
5145 }
5146 }
5147
5148
5149 /* Given a basic block B which ends with a conditional and has
5150 precisely two successors, determine which of the edges is taken if
5151 the conditional is true and which is taken if the conditional is
5152 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5153
5154 void
5155 extract_true_false_edges_from_block (basic_block b,
5156 edge *true_edge,
5157 edge *false_edge)
5158 {
5159 edge e = EDGE_SUCC (b, 0);
5160
5161 if (e->flags & EDGE_TRUE_VALUE)
5162 {
5163 *true_edge = e;
5164 *false_edge = EDGE_SUCC (b, 1);
5165 }
5166 else
5167 {
5168 *false_edge = e;
5169 *true_edge = EDGE_SUCC (b, 1);
5170 }
5171 }
5172
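/* Illustrative sketch (not part of this file): a typical caller that is
   looking at a block ending in a COND_EXPR does

       edge true_edge, false_edge;
       extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   and then reasons about the two successor blocks separately.  */
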
5173 struct tree_opt_pass pass_warn_function_return =
5174 {
5175 NULL, /* name */
5176 NULL, /* gate */
5177 execute_warn_function_return, /* execute */
5178 NULL, /* sub */
5179 NULL, /* next */
5180 0, /* static_pass_number */
5181 0, /* tv_id */
5182 PROP_cfg, /* properties_required */
5183 0, /* properties_provided */
5184 0, /* properties_destroyed */
5185 0, /* todo_flags_start */
5186 0, /* todo_flags_finish */
5187 0 /* letter */
5188 };
5189
5190 /* Emit noreturn warnings. */
5191
5192 static void
5193 execute_warn_function_noreturn (void)
5194 {
5195 if (warn_missing_noreturn
5196 && !TREE_THIS_VOLATILE (cfun->decl)
5197 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5198 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5199 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5200 "for attribute %<noreturn%>",
5201 cfun->decl);
5202 }
5203
5204 struct tree_opt_pass pass_warn_function_noreturn =
5205 {
5206 NULL, /* name */
5207 NULL, /* gate */
5208 execute_warn_function_noreturn, /* execute */
5209 NULL, /* sub */
5210 NULL, /* next */
5211 0, /* static_pass_number */
5212 0, /* tv_id */
5213 PROP_cfg, /* properties_required */
5214 0, /* properties_provided */
5215 0, /* properties_destroyed */
5216 0, /* todo_flags_start */
5217 0, /* todo_flags_finish */
5218 0 /* letter */
5219 };