tree-cfg.c (thread_jumps): Speed up by keeping a pointer to the last used element...
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "errors.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47
48 /* This file contains functions for building the Control Flow Graph (CFG)
49 for a function tree. */
50
51 /* Local declarations. */
52
53 /* Initial capacity for the basic block array. */
54 static const int initial_cfg_capacity = 20;
55
56 /* Mapping of labels to their associated blocks. This can greatly speed up
57 building of the CFG in code with lots of gotos. */
58 static GTY(()) varray_type label_to_block_map;
59
60 /* CFG statistics. */
61 struct cfg_stats_d
62 {
63 long num_merged_labels;
64 };
65
66 static struct cfg_stats_d cfg_stats;
67
68 /* Nonzero if we found a computed goto while building basic blocks. */
69 static bool found_computed_goto;
70
71 /* Basic blocks and flowgraphs. */
72 static basic_block create_bb (void *, void *, basic_block);
73 static void create_block_annotation (basic_block);
74 static void free_blocks_annotations (void);
75 static void clear_blocks_annotations (void);
76 static void make_blocks (tree);
77 static void factor_computed_gotos (void);
78
79 /* Edges. */
80 static void make_edges (void);
81 static void make_ctrl_stmt_edges (basic_block);
82 static void make_exit_edges (basic_block);
83 static void make_cond_expr_edges (basic_block);
84 static void make_switch_expr_edges (basic_block);
85 static void make_goto_expr_edges (basic_block);
86 static edge tree_redirect_edge_and_branch (edge, basic_block);
87 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
88 static void split_critical_edges (void);
89
90 /* Various helpers. */
91 static inline bool stmt_starts_bb_p (tree, tree);
92 static int tree_verify_flow_info (void);
93 static void tree_make_forwarder_block (edge);
94 static bool thread_jumps (void);
95 static bool tree_forwarder_block_p (basic_block);
96 static void tree_cfg2vcg (FILE *);
97
98 /* Flowgraph optimization and cleanup. */
99 static void tree_merge_blocks (basic_block, basic_block);
100 static bool tree_can_merge_blocks_p (basic_block, basic_block);
101 static void remove_bb (basic_block);
102 static bool cleanup_control_flow (void);
103 static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
104 static edge find_taken_edge_cond_expr (basic_block, tree);
105 static edge find_taken_edge_switch_expr (basic_block, tree);
106 static tree find_case_label_for_value (tree, tree);
107 static bool phi_alternatives_equal (basic_block, edge, edge);
108
109
110 /*---------------------------------------------------------------------------
111 Create basic blocks
112 ---------------------------------------------------------------------------*/
113
/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  On return the basic block
   array, the label-to-block map and all CFG edges are built, and the
   initial cleanup (dead labels, case grouping, unreachable code) has
   been performed.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Initialize rbi_pool.  */
  alloc_rbi_pool ();

  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = 0;
  last_basic_block = 0;
  VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  /* Build a mapping of labels to their associated blocks.  */
  VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
		  "label to block map");

  /* Start with the trivial chain ENTRY -> EXIT; make_blocks links the
     real blocks in between.  */
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;

  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == 0)
    create_empty_bb (ENTRY_BLOCK_PTR);

  create_block_annotation (ENTRY_BLOCK_PTR);
  create_block_annotation (EXIT_BLOCK_PTR);

  /* Adjust the size of the array.  */
  VARRAY_GROW (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    /* NOTE: this DUMP_FILE intentionally shadows the global one; the
       'if (dump_file)' after this scope tests the global again.  */
    FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (dump_file)
      {
	tree_cfg2vcg (dump_file);
	dump_end (TDI_vcg, dump_file);
      }
  }

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
190
/* Pass entry point: build the CFG for the body of the function
   currently being compiled.  */

static void
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
}
196
/* Pass descriptor for the CFG construction pass.  Requires lowered
   GIMPLE with EH (PROP_gimple_leh) and provides PROP_cfg.  */

struct tree_opt_pass pass_build_cfg =
{
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts,			/* todo_flags_finish */
  0					/* letter */
};
213
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      /* Skip empty blocks.  */
      if (bsi_end_p (bsi))
	continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  tree assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (! factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      block_stmt_iterator new_bsi = bsi_start (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label ();
	      factored_computed_goto_label
		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
				BSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
	      bsi_insert_after (&new_bsi, factored_computed_goto,
				BSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = build (MODIFY_EXPR, ptr_type_node,
			      var, GOTO_DESTINATION (last));
	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  GOTO_DESTINATION (last) = factored_label_decl;
	}
    }
}
289
290
291 /* Create annotations for a single basic block. */
292
293 static void
294 create_block_annotation (basic_block bb)
295 {
296 /* Verify that the tree_annotations field is clear. */
297 gcc_assert (!bb->tree_annotations);
298 bb->tree_annotations = ggc_alloc_cleared (sizeof (struct bb_ann_d));
299 }
300
301
/* Release the annotations of every basic block.  The annotations are
   GC-allocated, so clearing the pointers is all that is required.  */

static void
free_blocks_annotations (void)
{
  clear_blocks_annotations ();
}
308
309
310 /* Clear the annotations for all the basic blocks. */
311
312 static void
313 clear_blocks_annotations (void)
314 {
315 basic_block bb;
316
317 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
318 bb->tree_annotations = NULL;
319 }
320
321
/* Build a flowgraph for the statement_list STMT_LIST.  Each statement
   that starts a block causes the list to be split there and a new
   basic block created; every statement is tagged with its block.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  /* Split the list so the new block's statement list begins at
	     STMT; the very first block reuses STMT_LIST unsplit.  */
	  if (!first_stmt_of_list)
	    stmt_list = tsi_split_statement_list_before (&i);
	  bb = create_basic_block (stmt_list, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      set_bb_for_stmt (stmt, bb);

      /* Record computed gotos so build_tree_cfg can factor them.  */
      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
367
368
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, becomes the new block's statement list; E must be NULL
   (this hook variant does not take an edge argument).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  */
  bb = alloc_block ();
  memset (bb, 0, sizeof (*bb));

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->stmt_list = h ? h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
    {
      /* Grow by about 25% so repeated creation is amortized.  */
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VARRAY_GROW (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  BASIC_BLOCK (last_basic_block) = bb;

  create_block_annotation (bb);

  n_basic_blocks++;
  last_basic_block++;

  initialize_bb_rbi (bb);
  return bb;
}
407
408
409 /*---------------------------------------------------------------------------
410 Edge creation
411 ---------------------------------------------------------------------------*/
412
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  /* Traverse basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Empty blocks (FIRST == NULL) get only the fallthru edge
	 created below.  */
      if (first)
	{
	  /* Edges for statements that always alter flow control.  */
	  if (is_ctrl_stmt (last))
	    make_ctrl_stmt_edges (bb);

	  /* Edges for statements that sometimes alter flow control.  */
	  if (is_ctrl_altering_stmt (last))
	    make_exit_edges (bb);
	}

      /* Finally, if no edges were created above, this is a regular
	 basic block that only needs a fallthru edge.  */
      if (EDGE_COUNT (bb->succs) == 0)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  /* We do not care about fake edges, so remove any that the CFG
     builder inserted for completeness.  */
  remove_fake_exit_edges ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}
454
455
/* Create edges for control statement at basic block BB.  BB must end
   in one of the control statements handled below.  */

static void
make_ctrl_stmt_edges (basic_block bb)
{
  tree last = last_stmt (bb);

  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case GOTO_EXPR:
      make_goto_expr_edges (bb);
      break;

    case RETURN_EXPR:
      make_edge (bb, EXIT_BLOCK_PTR, 0);
      break;

    case COND_EXPR:
      make_cond_expr_edges (bb);
      break;

    case SWITCH_EXPR:
      make_switch_expr_edges (bb);
      break;

    case RESX_EXPR:
      make_eh_edges (last);
      /* Yet another NORETURN hack: if the resume has no reachable
	 handler, give the block a fake edge to EXIT so the CFG stays
	 connected.  */
      if (EDGE_COUNT (bb->succs) == 0)
	make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
      break;

    default:
      gcc_unreachable ();
    }
}
493
494
/* Create exit edges for statements in block BB that alter the flow of
   control.  Statements that alter the control flow are 'goto', 'return'
   and calls to non-returning functions.  */

static void
make_exit_edges (basic_block bb)
{
  tree last = last_stmt (bb), op;

  gcc_assert (last);
  switch (TREE_CODE (last))
    {
    case CALL_EXPR:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (TREE_SIDE_EFFECTS (last)
	  && current_function_has_nonlocal_label)
	make_goto_expr_edges (bb);

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* Some calls are known not to return.  For such calls we create
	 a fake edge.

	 We really need to revamp how we build edges so that it's not
	 such a bloody pain to avoid creating edges for this case since
	 all we do is remove these edges when we're done building the
	 CFG.  */
      if (call_expr_flags (last) & (ECF_NORETURN | ECF_LONGJMP))
	{
	  /* No fallthru for noreturn/longjmp calls; return early.  */
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	  return;
	}

      /* Don't forget the fall-thru edge.  */
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    case MODIFY_EXPR:
      /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
	 may have an abnormal edge.  Search the RHS for this case and
	 create any required edges.  */
      op = get_call_expr_in (last);
      if (op && TREE_SIDE_EFFECTS (op)
	  && current_function_has_nonlocal_label)
	make_goto_expr_edges (bb);

      make_eh_edges (last);
      make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      break;

    default:
      gcc_unreachable ();
    }
}
553
554
555 /* Create the edges for a COND_EXPR starting at block BB.
556 At this point, both clauses must contain only simple gotos. */
557
558 static void
559 make_cond_expr_edges (basic_block bb)
560 {
561 tree entry = last_stmt (bb);
562 basic_block then_bb, else_bb;
563 tree then_label, else_label;
564
565 gcc_assert (entry);
566 gcc_assert (TREE_CODE (entry) == COND_EXPR);
567
568 /* Entry basic blocks for each component. */
569 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
570 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
571 then_bb = label_to_block (then_label);
572 else_bb = label_to_block (else_label);
573
574 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
575 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
576 }
577
578
579 /* Create the edges for a SWITCH_EXPR starting at block BB.
580 At this point, the switch body has been lowered and the
581 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
582
583 static void
584 make_switch_expr_edges (basic_block bb)
585 {
586 tree entry = last_stmt (bb);
587 size_t i, n;
588 tree vec;
589
590 vec = SWITCH_LABELS (entry);
591 n = TREE_VEC_LENGTH (vec);
592
593 for (i = 0; i < n; ++i)
594 {
595 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
596 basic_block label_bb = label_to_block (lab);
597 make_edge (bb, label_bb, 0);
598 }
599 }
600
601
/* Return the basic block holding label DEST.  */

basic_block
label_to_block (tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      /* Re-read the UID; presumably the insertion above assigned DEST
	 a valid one -- NOTE(review): confirm against bsi_insert_before.  */
      uid = LABEL_DECL_UID (dest);
    }
  return VARRAY_BB (label_to_block_map, uid);
}
623
624
/* Create edges for a goto statement at block BB.  Handles simple local
   gotos, computed gotos and the abnormal edges arising from nonlocal
   gotos out of calls.  */

static void
make_goto_expr_edges (basic_block bb)
{
  tree goto_t, dest;
  basic_block target_bb;
  int for_call;
  block_stmt_iterator last = bsi_last (bb);

  goto_t = bsi_stmt (last);

  /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
     CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
     from a nonlocal goto.  */
  if (TREE_CODE (goto_t) != GOTO_EXPR)
    {
      dest = error_mark_node;
      for_call = 1;
    }
  else
    {
      dest = GOTO_DESTINATION (goto_t);
      for_call = 0;

      /* A GOTO to a local label creates normal edges.  */
      if (simple_goto_p (goto_t))
	{
	  edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
	  e->goto_locus = EXPR_LOCATION (goto_t);
#else
	  e->goto_locus = EXPR_LOCUS (goto_t);
#endif
	  /* The GOTO statement itself is now redundant; the edge
	     records the control transfer.  */
	  bsi_remove (&last);
	  return;
	}

      /* Nothing more to do for nonlocal gotos.  */
      if (TREE_CODE (dest) == LABEL_DECL)
	return;

      /* Computed gotos remain.  */
    }

  /* Look for the block starting with the destination label.  In the
     case of a computed goto, make an edge to any label block we find
     in the CFG.  */
  FOR_EACH_BB (target_bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree target = bsi_stmt (bsi);

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (TREE_CODE (target) != LABEL_EXPR)
	    break;

	  if (
	      /* Computed GOTOs.  Make an edge to every label block that has
		 been marked as a potential target for a computed goto.  */
	      (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
	      /* Nonlocal GOTO target.  Make an edge to every label block
		 that has been marked as a potential target for a nonlocal
		 goto.  */
	      || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
	    {
	      make_edge (bb, target_bb, EDGE_ABNORMAL);
	      break;
	    }
	}
    }

  /* Degenerate case of computed goto with no labels.  */
  if (!for_call && EDGE_COUNT (bb->succs) == 0)
    make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
}
703
704
705 /*---------------------------------------------------------------------------
706 Flowgraph analysis
707 ---------------------------------------------------------------------------*/
708
/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if any of the cleanups changed the CFG.  */

bool
cleanup_tree_cfg (void)
{
  bool retval = false;

  timevar_push (TV_TREE_CLEANUP_CFG);

  retval = cleanup_control_flow ();
  retval |= delete_unreachable_blocks ();
  retval |= thread_jumps ();

#ifdef ENABLE_CHECKING
  /* The three cleanups above are expected to reach a fixed point in
     one round; re-running them must find nothing further to do.  */
  if (retval)
    {
      gcc_assert (!cleanup_control_flow ());
      gcc_assert (!delete_unreachable_blocks ());
      gcc_assert (!thread_jumps ());
    }
#endif

  /* Merging the blocks creates no new opportunities for the other
     optimizations, so do it here.  */
  merge_seq_blocks ();

  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_TREE_CLEANUP_CFG);
  return retval;
}
743
744
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once, running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.
   Allocated at the start of cleanup_dead_labels and freed before it
   returns; only valid while that function and its helpers run.  */
static tree *label_for_bb;
754
755 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
756 static void
757 update_eh_label (struct eh_region *region)
758 {
759 tree old_label = get_eh_region_tree_label (region);
760 if (old_label)
761 {
762 tree new_label;
763 basic_block bb = label_to_block (old_label);
764
765 /* ??? After optimizing, there may be EH regions with labels
766 that have already been removed from the function body, so
767 there is no basic block for them. */
768 if (! bb)
769 return;
770
771 new_label = label_for_bb[bb->index];
772 set_eh_region_tree_label (region, new_label);
773 }
774 }
775
776 /* Given LABEL return the first label in the same basic block. */
777 static tree
778 main_block_label (tree label)
779 {
780 basic_block bb = label_to_block (label);
781
782 /* label_to_block possibly inserted undefined label into the chain. */
783 if (!label_for_bb[bb->index])
784 label_for_bb[bb->index] = label;
785 return label_for_bb[bb->index];
786 }
787
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = xcalloc (last_basic_block, sizeof (tree));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree label, stmt = bsi_stmt (i);

	  /* Labels appear only at the start of a block; stop at the
	     first non-label statement.  */
	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (! label_for_bb[bb->index])
	    {
	      label_for_bb[bb->index] = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (! DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
	    {
	      label_for_bb[bb->index] = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    tree true_branch, false_branch;

	    true_branch = COND_EXPR_THEN (stmt);
	    false_branch = COND_EXPR_ELSE (stmt);

	    GOTO_DESTINATION (true_branch)
	      = main_block_label (GOTO_DESTINATION (true_branch));
	    GOTO_DESTINATION (false_branch)
	      = main_block_label (GOTO_DESTINATION (false_branch));

	    break;
	  }

	case SWITCH_EXPR:
	  {
	    size_t i;
	    tree vec = SWITCH_LABELS (stmt);
	    size_t n = TREE_VEC_LENGTH (vec);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      CASE_LABEL (TREE_VEC_ELT (vec, i))
		= main_block_label (CASE_LABEL (TREE_VEC_ELT (vec, i)));

	    break;
	  }

	/* We have to handle GOTO_EXPRs until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GOTO_EXPR:
	  if (! computed_goto_p (stmt))
	    {
	      GOTO_DESTINATION (stmt)
		= main_block_label (GOTO_DESTINATION (stmt));
	      break;
	    }
	  /* Computed gotos fall through to the default case.  */

	default:
	  break;
	}
    }

  /* EH regions refer to labels too; redirect them the same way.  */
  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
	continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* Keep the leading label, user labels, and nonlocal targets;
	     remove every other (artificial, redundant) label.  */
	  if (label == label_for_this_bb
	      || ! DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label))
	    bsi_next (&i);
	  else
	    bsi_remove (&i);
	}
    }

  free (label_for_bb);
}
920
921 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
922 and scan the sorted vector of cases. Combine the ones jumping to the
923 same label.
924 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
925
926 void
927 group_case_labels (void)
928 {
929 basic_block bb;
930
931 FOR_EACH_BB (bb)
932 {
933 tree stmt = last_stmt (bb);
934 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
935 {
936 tree labels = SWITCH_LABELS (stmt);
937 int old_size = TREE_VEC_LENGTH (labels);
938 int i, j, new_size = old_size;
939 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
940 tree default_label;
941
942 /* The default label is always the last case in a switch
943 statement after gimplification. */
944 default_label = CASE_LABEL (default_case);
945
946 /* Look for possible opportunities to merge cases.
947 Ignore the last element of the label vector because it
948 must be the default case. */
949 i = 0;
950 while (i < old_size - 1)
951 {
952 tree base_case, base_label, base_high, type;
953 base_case = TREE_VEC_ELT (labels, i);
954
955 gcc_assert (base_case);
956 base_label = CASE_LABEL (base_case);
957
958 /* Discard cases that have the same destination as the
959 default case. */
960 if (base_label == default_label)
961 {
962 TREE_VEC_ELT (labels, i) = NULL_TREE;
963 i++;
964 new_size--;
965 continue;
966 }
967
968 type = TREE_TYPE (CASE_LOW (base_case));
969 base_high = CASE_HIGH (base_case) ?
970 CASE_HIGH (base_case) : CASE_LOW (base_case);
971 i++;
972 /* Try to merge case labels. Break out when we reach the end
973 of the label vector or when we cannot merge the next case
974 label with the current one. */
975 while (i < old_size - 1)
976 {
977 tree merge_case = TREE_VEC_ELT (labels, i);
978 tree merge_label = CASE_LABEL (merge_case);
979 tree t = int_const_binop (PLUS_EXPR, base_high,
980 integer_one_node, 1);
981
982 /* Merge the cases if they jump to the same place,
983 and their ranges are consecutive. */
984 if (merge_label == base_label
985 && tree_int_cst_equal (CASE_LOW (merge_case), t))
986 {
987 base_high = CASE_HIGH (merge_case) ?
988 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
989 CASE_HIGH (base_case) = base_high;
990 TREE_VEC_ELT (labels, i) = NULL_TREE;
991 new_size--;
992 i++;
993 }
994 else
995 break;
996 }
997 }
998
999 /* Compress the case labels in the label vector, and adjust the
1000 length of the vector. */
1001 for (i = 0, j = 0; i < new_size; i++)
1002 {
1003 while (! TREE_VEC_ELT (labels, j))
1004 j++;
1005 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1006 }
1007 TREE_VEC_LENGTH (labels) = new_size;
1008 }
1009 }
1010 }
1011
/* Checks whether we can merge block B into block A.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;

  /* A must have exactly one successor...  (this check must come first:
     EDGE_SUCC (a, 0) below is only valid when a successor exists).  */
  if (EDGE_COUNT (a->succs) != 1)
    return false;

  /* ... which is a normal edge...  */
  if (EDGE_SUCC (a, 0)->flags & EDGE_ABNORMAL)
    return false;

  /* ... leading to B.  */
  if (EDGE_SUCC (a, 0)->dest != b)
    return false;

  /* The EXIT block can never be merged away.  */
  if (b == EXIT_BLOCK_PTR)
    return false;

  /* B must have no predecessor other than A.  */
  if (EDGE_COUNT (b->preds) > 1)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* There may be no phi nodes at the start of b.  Most of these degenerate
     phi nodes should be cleaned up by kill_redundant_phi_nodes.  */
  if (phi_nodes (b))
    return false;

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
	return false;
    }

  return true;
}
1063
1064
/* Merge block B into block A.  The caller is responsible for checking
   that the blocks are mergeable (see tree_can_merge_blocks_p).  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (EDGE_SUCC (a, 0)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
	bsi_remove (&bsi);
      else
	{
	  set_bb_for_stmt (bsi_stmt (bsi), a);
	  bsi_next (&bsi);
	}
    }

  /* Merge the chains: append B's statement list to A's and detach it
     from B.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}
1099
1100
1101 /* Walk the function tree removing unnecessary statements.
1102
1103 * Empty statement nodes are removed
1104
1105 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1106
1107 * Unnecessary COND_EXPRs are removed
1108
1109 * Some unnecessary BIND_EXPRs are removed
1110
1111 Clearly more work could be done. The trick is doing the analysis
1112 and removal fast enough to be a net improvement in compile times.
1113
1114 Note that when we remove a control structure such as a COND_EXPR
1115 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1116 to ensure we eliminate all the useless code. */
1117
/* State threaded through the remove_useless_stmts_* walkers.  */
struct rus_data
{
  tree *last_goto;	/* Address of the most recently seen GOTO_EXPR to a
			   LABEL_DECL, so that a directly following label can
			   delete it; NULL once any intervening statement
			   makes the goto necessary.  */
  bool repeat;		/* Request another pass over the function because a
			   simplification may have exposed more work.  */
  bool may_throw;	/* Current region may throw an exception.  */
  bool may_branch;	/* Current region may branch (goto or return).  */
  bool has_label;	/* Current region contains a label.  */
};
1126
1127 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1128
1129 static bool
1130 remove_useless_stmts_warn_notreached (tree stmt)
1131 {
1132 if (EXPR_HAS_LOCATION (stmt))
1133 {
1134 location_t loc = EXPR_LOCATION (stmt);
1135 warning ("%Hwill never be executed", &loc);
1136 return true;
1137 }
1138
1139 switch (TREE_CODE (stmt))
1140 {
1141 case STATEMENT_LIST:
1142 {
1143 tree_stmt_iterator i;
1144 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1145 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1146 return true;
1147 }
1148 break;
1149
1150 case COND_EXPR:
1151 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1152 return true;
1153 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1154 return true;
1155 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1156 return true;
1157 break;
1158
1159 case TRY_FINALLY_EXPR:
1160 case TRY_CATCH_EXPR:
1161 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1162 return true;
1163 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1164 return true;
1165 break;
1166
1167 case CATCH_EXPR:
1168 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1169 case EH_FILTER_EXPR:
1170 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1171 case BIND_EXPR:
1172 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1173
1174 default:
1175 /* Not a live container. */
1176 break;
1177 }
1178
1179 return false;
1180 }
1181
/* Simplify the COND_EXPR at *STMT_P during the useless-statement walk:
   delete the conditional entirely when both arms are empty, fold away a
   statically known condition when the dead arm has no label, and strip
   a couple of easily recognized redundant single-statement arms.  DATA
   carries the label/goto bookkeeping for the enclosing region.  */

static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  /* Track whether each arm contains a label separately, so that an arm
     that is a jump target is never removed as dead.  */
  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  /* Re-read the arms: the recursive calls above may have replaced them.  */
  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = COND_EXPR_COND (*stmt_p);

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
	  && TREE_CODE (then_stmt) == GOTO_EXPR
	  && TREE_CODE (else_stmt) == GOTO_EXPR
	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
	{
	  *stmt_p = then_stmt;
	  data->repeat = true;
	}

      /* If the THEN/ELSE clause merely assigns a value to a variable or
	 parameter which is already known to contain that value, then
	 remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
	{
	  /* "if (x) ...; else x = 0;" -- the else store is a no-op,
	     since x is known to be zero on that path.  */
	  if (else_stmt
	      && TREE_CODE (else_stmt) == MODIFY_EXPR
	      && TREE_OPERAND (else_stmt, 0) == cond
	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
	}
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
	{
	  /* "if (x == C) x = C;" or "if (x != C) ...; else x = C;" --
	     the store in the selected arm is a no-op.  */
	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
		       ? then_stmt : else_stmt);
	  tree *location = (TREE_CODE (cond) == EQ_EXPR
			    ? &COND_EXPR_THEN (*stmt_p)
			    : &COND_EXPR_ELSE (*stmt_p));

	  if (stmt
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
	    *location = alloc_stmt_list ();
	}
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
1281
1282
/* Simplify the TRY_FINALLY_EXPR at *STMT_P: drop the construct when
   either operand is empty, or flatten it into straight-line code when
   the body can neither branch nor throw.  DATA accumulates region
   information as for the other remove_useless_stmts_* helpers.  */

static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  /* Snapshot the body's flags, then fold the saved outer flags back in
     before walking the FINALLY handler.  */
  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}
1332
1333
/* Simplify the TRY_CATCH_EXPR at *STMT_P: drop it when the body cannot
   throw, and otherwise walk the handler, noting when the handler
   catches everything so exceptions do not propagate past this point.  */

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  /* The handler of a TRY_CATCH_EXPR; assumed non-empty here, its first
     statement determines the handler kind -- TODO confirm the handler
     can never be an empty list at this stage.  */
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  stmt = tsi_stmt (i);
	  /* If we catch all exceptions, then the body does not
	     propagate exceptions past this point.  */
	  if (CATCH_TYPES (stmt) == NULL)
	    this_may_throw = false;
	  data->last_goto = NULL;
	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
	}
      break;

    case EH_FILTER_EXPR:
      /* A must-not-throw or catch-nothing filter stops propagation.  */
      if (EH_FILTER_MUST_NOT_THROW (stmt))
	this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
	this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
	 the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
	{
	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
	  data->repeat = true;
	}
      break;
    }
  data->may_throw |= this_may_throw;
}
1407
1408
1409 static void
1410 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1411 {
1412 tree block;
1413
1414 /* First remove anything underneath the BIND_EXPR. */
1415 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1416
1417 /* If the BIND_EXPR has no variables, then we can pull everything
1418 up one level and remove the BIND_EXPR, unless this is the toplevel
1419 BIND_EXPR for the current function or an inlined function.
1420
1421 When this situation occurs we will want to apply this
1422 optimization again. */
1423 block = BIND_EXPR_BLOCK (*stmt_p);
1424 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1425 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1426 && (! block
1427 || ! BLOCK_ABSTRACT_ORIGIN (block)
1428 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1429 != FUNCTION_DECL)))
1430 {
1431 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1432 data->repeat = true;
1433 }
1434 }
1435
1436
1437 static void
1438 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1439 {
1440 tree dest = GOTO_DESTINATION (*stmt_p);
1441
1442 data->may_branch = true;
1443 data->last_goto = NULL;
1444
1445 /* Record the last goto expr, so that we can delete it if unnecessary. */
1446 if (TREE_CODE (dest) == LABEL_DECL)
1447 data->last_goto = stmt_p;
1448 }
1449
1450
1451 static void
1452 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1453 {
1454 tree label = LABEL_EXPR_LABEL (*stmt_p);
1455
1456 data->has_label = true;
1457
1458 /* We do want to jump across non-local label receiver code. */
1459 if (DECL_NONLOCAL (label))
1460 data->last_goto = NULL;
1461
1462 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1463 {
1464 *data->last_goto = build_empty_stmt ();
1465 data->repeat = true;
1466 }
1467
1468 /* ??? Add something here to delete unused labels. */
1469 }
1470
1471
1472 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1473 decl. This allows us to eliminate redundant or useless
1474 calls to "const" functions.
1475
1476 Gimplifier already does the same operation, but we may notice functions
1477 being const and pure once their calls has been gimplified, so we need
1478 to update the flag. */
1479
1480 static void
1481 update_call_expr_flags (tree call)
1482 {
1483 tree decl = get_callee_fndecl (call);
1484 if (!decl)
1485 return;
1486 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1487 TREE_SIDE_EFFECTS (call) = 0;
1488 if (TREE_NOTHROW (decl))
1489 TREE_NOTHROW (call) = 1;
1490 }
1491
1492
1493 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1494
1495 void
1496 notice_special_calls (tree t)
1497 {
1498 int flags = call_expr_flags (t);
1499
1500 if (flags & ECF_MAY_BE_ALLOCA)
1501 current_function_calls_alloca = true;
1502 if (flags & ECF_RETURNS_TWICE)
1503 current_function_calls_setjmp = true;
1504 }
1505
1506
1507 /* Clear flags set by notice_special_calls. Used by dead code removal
1508 to update the flags. */
1509
1510 void
1511 clear_special_calls (void)
1512 {
1513 current_function_calls_alloca = false;
1514 current_function_calls_setjmp = false;
1515 }
1516
1517
/* Core worker of the useless-statement-removal pass: dispatch on the
   code of *TP, simplifying containers via the remove_useless_stmts_*
   helpers and updating the region flags in DATA for leaf statements.  */

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      fold_stmt (tp);
      /* A return makes any pending goto necessary.  */
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case MODIFY_EXPR:
      data->last_goto = NULL;
      fold_stmt (tp);
      /* The RHS may be a call; give it the same treatment as a bare
	 CALL_EXPR.  */
      op = get_call_expr_in (t);
      if (op)
	{
	  update_call_expr_flags (op);
	  notice_special_calls (op);
	}
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i = tsi_start (t);
	while (!tsi_end_p (i))
	  {
	    /* Drop empty statements outright.  */
	    t = tsi_stmt (i);
	    if (IS_EMPTY_STMT (t))
	      {
		tsi_delink (&i);
		continue;
	      }

	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

	    /* If the statement was rewritten into a list, splice it
	       inline rather than leaving a nested STATEMENT_LIST.  */
	    t = tsi_stmt (i);
	    if (TREE_CODE (t) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, t, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    default:
      /* Any other statement invalidates the pending goto.  */
      data->last_goto = NULL;
      break;
    }
}
1612
1613 static void
1614 remove_useless_stmts (void)
1615 {
1616 struct rus_data data;
1617
1618 clear_special_calls ();
1619
1620 do
1621 {
1622 memset (&data, 0, sizeof (data));
1623 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1624 }
1625 while (data.repeat);
1626 }
1627
1628
/* Pass descriptor for the useless statement removal pass ("useless" in
   dump file names).  No gate: it runs on every function once GIMPLE is
   available.  */

struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
1645
1646
/* Remove obviously useless statements in basic block BB.

   When BB is reached through exactly one conditional edge, the
   condition tells us the value of some variable on entry; assignments
   that store that same value back are deleted until something might
   change either the variable or the value.  */

static void
cfg_remove_useless_stmts_bb (basic_block bb)
{
  block_stmt_iterator bsi;
  tree stmt = NULL_TREE;
  tree cond, var = NULL_TREE, val = NULL_TREE;
  struct var_ann_d *ann;

  /* Check whether we come here from a condition, and if so, get the
     condition.  */
  if (EDGE_COUNT (bb->preds) != 1
      || !(EDGE_PRED (bb, 0)->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
    return;

  cond = COND_EXPR_COND (last_stmt (EDGE_PRED (bb, 0)->src));

  /* "if (x)": x is true on the true edge, false on the false edge.  */
  if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
    {
      var = cond;
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_false_node : boolean_true_node);
    }
  /* "if (!x)": the edge senses are inverted.  */
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR
	   && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL))
    {
      var = TREE_OPERAND (cond, 0);
      val = (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE
	     ? boolean_true_node : boolean_false_node);
    }
  /* "if (x == val)": after normalizing the false edge by inverting the
     condition, an equality gives us the variable's value.  */
  else
    {
      if (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE)
	cond = invert_truthvalue (cond);
      if (TREE_CODE (cond) == EQ_EXPR
	  && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	  && (TREE_CODE (TREE_OPERAND (cond, 1)) == VAR_DECL
	      || TREE_CODE (TREE_OPERAND (cond, 1)) == PARM_DECL
	      || TREE_CONSTANT (TREE_OPERAND (cond, 1))))
	{
	  var = TREE_OPERAND (cond, 0);
	  val = TREE_OPERAND (cond, 1);
	}
      else
	return;
    }

  /* Only work for normal local variables.  */
  ann = var_ann (var);
  if (!ann
      || ann->may_aliases
      || TREE_ADDRESSABLE (var))
    return;

  /* If the value is itself a variable, it must be unaliased too.  */
  if (! TREE_CONSTANT (val))
    {
      ann = var_ann (val);
      if (!ann
	  || ann->may_aliases
	  || TREE_ADDRESSABLE (val))
	return;
    }

  /* Ignore floating point variables, since comparison behaves weird for
     them.  */
  if (FLOAT_TYPE_P (TREE_TYPE (var)))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
    {
      stmt = bsi_stmt (bsi);

      /* If the THEN/ELSE clause merely assigns a value to a variable/parameter
	 which is already known to contain that value, then remove the useless
	 THEN/ELSE clause.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR
	  && TREE_OPERAND (stmt, 0) == var
	  && operand_equal_p (val, TREE_OPERAND (stmt, 1), 0))
	{
	  bsi_remove (&bsi);
	  continue;
	}

      /* Invalidate the var if we encounter something that could modify it.
	 Likewise for the value it was previously set to.  Note that we only
	 consider values that are either a VAR_DECL or PARM_DECL so we
	 can test for conflict very simply.  */
      if (TREE_CODE (stmt) == ASM_EXPR
	  || (TREE_CODE (stmt) == MODIFY_EXPR
	      && (TREE_OPERAND (stmt, 0) == var
		  || TREE_OPERAND (stmt, 0) == val)))
	return;

      bsi_next (&bsi);
    }
}
1746
1747
1748 /* A CFG-aware version of remove_useless_stmts. */
1749
1750 void
1751 cfg_remove_useless_stmts (void)
1752 {
1753 basic_block bb;
1754
1755 #ifdef ENABLE_CHECKING
1756 verify_flow_info ();
1757 #endif
1758
1759 FOR_EACH_BB (bb)
1760 {
1761 cfg_remove_useless_stmts_bb (bb);
1762 }
1763 }
1764
1765
1766 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1767
1768 static void
1769 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1770 {
1771 tree phi;
1772
1773 /* Since this block is no longer reachable, we can just delete all
1774 of its PHI nodes. */
1775 phi = phi_nodes (bb);
1776 while (phi)
1777 {
1778 tree next = PHI_CHAIN (phi);
1779 remove_phi_node (phi, NULL_TREE, bb);
1780 phi = next;
1781 }
1782
1783 /* Remove edges to BB's successors. */
1784 while (EDGE_COUNT (bb->succs) > 0)
1785 ssa_remove_edge (EDGE_SUCC (bb, 0));
1786 }
1787
1788
/* Remove statements of basic block BB, then its PHI nodes and outgoing
   edges.  FORCED_LABELs are rescued into the previous block; everything
   else is deleted.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
  source_locus loc = 0;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == LABEL_EXPR
	  && FORCED_LABEL (LABEL_EXPR_LABEL (stmt)))
	{
	  /* A forced label must survive: move it into the preceding
	     block instead of deleting it.  */
	  basic_block new_bb = bb->prev_bb;
	  block_stmt_iterator new_bsi = bsi_after_labels (new_bb);

	  bsi_remove (&i);
	  bsi_insert_after (&new_bsi, stmt, BSI_NEW_STMT);
	}
      else
	{
	  release_defs (stmt);

	  set_bb_for_stmt (stmt, NULL);
	  bsi_remove (&i);
	}

      /* Don't warn for removed gotos.  Gotos are often removed due to
	 jump threading, thus resulting in bogus warnings.  Not great,
	 since this way we lose warnings for gotos in the original
	 program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
#ifdef USE_MAPPED_LOCATION
	loc = EXPR_LOCATION (stmt);
#else
	loc = EXPR_LOCUS (stmt);
#endif
    }

  /* If requested, give a warning that the block is unreachable.  The
     loop above walks the statements in forward order, and the "!loc"
     guard keeps the first location we encountered, so the warning is
     reported for the first located statement in the block.  */
  if (warn_notreached && loc)
#ifdef USE_MAPPED_LOCATION
    warning ("%Hwill never be executed", &loc);
#else
    warning ("%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
1853
1854 /* Try to remove superfluous control structures. */
1855
1856 static bool
1857 cleanup_control_flow (void)
1858 {
1859 basic_block bb;
1860 block_stmt_iterator bsi;
1861 bool retval = false;
1862 tree stmt;
1863
1864 FOR_EACH_BB (bb)
1865 {
1866 bsi = bsi_last (bb);
1867
1868 if (bsi_end_p (bsi))
1869 continue;
1870
1871 stmt = bsi_stmt (bsi);
1872 if (TREE_CODE (stmt) == COND_EXPR
1873 || TREE_CODE (stmt) == SWITCH_EXPR)
1874 retval |= cleanup_control_expr_graph (bb, bsi);
1875 }
1876 return retval;
1877 }
1878
1879
/* Disconnect an unreachable block in the control expression starting
   at block BB: if the controlling predicate is a constant, keep only
   the taken edge, remove the control statement at BSI, and turn the
   surviving edge into a fallthru.  Return true if any edge was
   removed.  */

static bool
cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
{
  edge taken_edge;
  bool retval = false;
  tree expr = bsi_stmt (bsi), val;

  if (EDGE_COUNT (bb->succs) > 1)
    {
      edge e;
      edge_iterator ei;

      switch (TREE_CODE (expr))
	{
	case COND_EXPR:
	  val = COND_EXPR_COND (expr);
	  break;

	case SWITCH_EXPR:
	  val = SWITCH_COND (expr);
	  /* A switch is only decidable on an integer constant.  */
	  if (TREE_CODE (val) != INTEGER_CST)
	    return false;
	  break;

	default:
	  gcc_unreachable ();
	}

      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
	return false;

      /* Remove all the edges except the one that is always executed.
	 The iterator is only advanced when the current edge is kept;
	 ssa_remove_edge shifts the remaining edges into its slot.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e != taken_edge)
	    {
	      /* Fold the removed edge's profile into the survivor.  */
	      taken_edge->probability += e->probability;
	      taken_edge->count += e->count;
	      ssa_remove_edge (e);
	      retval = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (taken_edge->probability > REG_BR_PROB_BASE)
	taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = EDGE_SUCC (bb, 0);

  bsi_remove (&bsi);
  taken_edge->flags = EDGE_FALLTHRU;

  /* We removed some paths from the cfg.  */
  free_dominance_info (CDI_DOMINATORS);

  return retval;
}
1942
1943
1944 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
1945 predicate VAL, return the edge that will be taken out of the block.
1946 If VAL does not match a unique edge, NULL is returned. */
1947
1948 edge
1949 find_taken_edge (basic_block bb, tree val)
1950 {
1951 tree stmt;
1952
1953 stmt = last_stmt (bb);
1954
1955 gcc_assert (stmt);
1956 gcc_assert (is_ctrl_stmt (stmt));
1957 gcc_assert (val);
1958
1959 /* If VAL is a predicate of the form N RELOP N, where N is an
1960 SSA_NAME, we can usually determine its truth value. */
1961 if (COMPARISON_CLASS_P (val))
1962 val = fold (val);
1963
1964 /* If VAL is not a constant, we can't determine which edge might
1965 be taken. */
1966 if (!really_constant_p (val))
1967 return NULL;
1968
1969 if (TREE_CODE (stmt) == COND_EXPR)
1970 return find_taken_edge_cond_expr (bb, val);
1971
1972 if (TREE_CODE (stmt) == SWITCH_EXPR)
1973 return find_taken_edge_switch_expr (bb, val);
1974
1975 gcc_unreachable ();
1976 }
1977
1978
1979 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1980 statement, determine which of the two edges will be taken out of the
1981 block. Return NULL if either edge may be taken. */
1982
1983 static edge
1984 find_taken_edge_cond_expr (basic_block bb, tree val)
1985 {
1986 edge true_edge, false_edge;
1987
1988 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1989
1990 /* Otherwise, try to determine which branch of the if() will be taken.
1991 If VAL is a constant but it can't be reduced to a 0 or a 1, then
1992 we don't really know which edge will be taken at runtime. This
1993 may happen when comparing addresses (e.g., if (&var1 == 4)). */
1994 if (integer_nonzerop (val))
1995 return true_edge;
1996 else if (integer_zerop (val))
1997 return false_edge;
1998 else
1999 return NULL;
2000 }
2001
2002
2003 /* Given a constant value VAL and the entry block BB to a SWITCH_EXPR
2004 statement, determine which edge will be taken out of the block. Return
2005 NULL if any edge may be taken. */
2006
2007 static edge
2008 find_taken_edge_switch_expr (basic_block bb, tree val)
2009 {
2010 tree switch_expr, taken_case;
2011 basic_block dest_bb;
2012 edge e;
2013
2014 if (TREE_CODE (val) != INTEGER_CST)
2015 return NULL;
2016
2017 switch_expr = last_stmt (bb);
2018 taken_case = find_case_label_for_value (switch_expr, val);
2019 dest_bb = label_to_block (CASE_LABEL (taken_case));
2020
2021 e = find_edge (bb, dest_bb);
2022 gcc_assert (e);
2023 return e;
2024 }
2025
2026
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  The
   default label is the last element of the label vector.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  /* NOTE: LOW starts at (size_t) -1, i.e. one *below* index 0 via
     unsigned wraparound; only the differences HIGH - LOW and the
     midpoint (HIGH + LOW) / 2 are used, and both come out right
     modulo the size_t range.  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2068
2069
2070 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
2071 those alternatives are equal in each of the PHI nodes, then return
2072 true, else return false. */
2073
2074 static bool
2075 phi_alternatives_equal (basic_block dest, edge e1, edge e2)
2076 {
2077 tree phi, val1, val2;
2078 int n1, n2;
2079
2080 for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
2081 {
2082 n1 = phi_arg_from_edge (phi, e1);
2083 n2 = phi_arg_from_edge (phi, e2);
2084
2085 gcc_assert (n1 >= 0);
2086 gcc_assert (n2 >= 0);
2087
2088 val1 = PHI_ARG_DEF (phi, n1);
2089 val2 = PHI_ARG_DEF (phi, n2);
2090
2091 if (!operand_equal_p (val1, val2, 0))
2092 return false;
2093 }
2094
2095 return true;
2096 }
2097
2098
2099 /*---------------------------------------------------------------------------
2100 Debugging functions
2101 ---------------------------------------------------------------------------*/
2102
/* Dump tree-specific information of block BB to file OUTF, indented by
   INDENT columns; virtual operands are included (TDF_VOPS).  */

void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
  dump_generic_bb (outf, bb, indent, TDF_VOPS);
}
2110
2111
/* Dump a basic block on stderr.  Intended to be called from a
   debugger.  */

void
debug_tree_bb (basic_block bb)
{
  dump_bb (bb, stderr, 0);
}
2119
2120
2121 /* Dump basic block with index N on stderr. */
2122
2123 basic_block
2124 debug_tree_bb_n (int n)
2125 {
2126 debug_tree_bb (BASIC_BLOCK (n));
2127 return BASIC_BLOCK (n);
2128 }
2129
2130
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
2141
2142
2143 /* Dump the program showing basic block boundaries on the given FILE.
2144
2145 FLAGS are the same used by the tree dumping functions (see TDF_* in
2146 tree.h). */
2147
2148 void
2149 dump_tree_cfg (FILE *file, int flags)
2150 {
2151 if (flags & TDF_DETAILS)
2152 {
2153 const char *funcname
2154 = lang_hooks.decl_printable_name (current_function_decl, 2);
2155
2156 fputc ('\n', file);
2157 fprintf (file, ";; Function %s\n\n", funcname);
2158 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2159 n_basic_blocks, n_edges, last_basic_block);
2160
2161 brief_dump_cfg (file);
2162 fprintf (file, "\n");
2163 }
2164
2165 if (flags & TDF_STATS)
2166 dump_cfg_stats (file);
2167
2168 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2169 }
2170
2171
/* Dump CFG statistics on FILE: a table of instance counts and memory
   used by blocks, edges and block annotations, plus the running count
   of coalesced label blocks.  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  /* NOTE: this local deliberately recounts the edges and shadows the
     global n_edges used elsewhere (e.g. in dump_tree_cfg).  */
  int n_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  n_edges = 0;
  FOR_EACH_BB (bb)
    n_edges += EDGE_COUNT (bb->succs);
  size = n_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_1, "Edges", n_edges, SCALE (size), LABEL (size));

  size = n_basic_blocks * sizeof (struct bb_ann_d);
  total += size;
  fprintf (file, fmt_str_1, "Basic block annotations", n_basic_blocks,
	   SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  /* Track the historical maximum across all functions dumped in this
     compilation (MAX_NUM_MERGED_LABELS is static).  */
  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2226
2227
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2236
2237
/* Dump the flowgraph to a .vcg FILE: one node per basic block (labeled
   with the codes and line numbers of its first and last statement) and
   one edge per CFG edge, with fake edges drawn dotted.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Describe the block by its first and last statement, falling
	 back to "no-statement" for empty blocks.  */
      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2321
2322
2323
2324 /*---------------------------------------------------------------------------
2325 Miscellaneous helpers
2326 ---------------------------------------------------------------------------*/
2327
2328 /* Return true if T represents a stmt that always transfers control. */
2329
2330 bool
2331 is_ctrl_stmt (tree t)
2332 {
2333 return (TREE_CODE (t) == COND_EXPR
2334 || TREE_CODE (t) == SWITCH_EXPR
2335 || TREE_CODE (t) == GOTO_EXPR
2336 || TREE_CODE (t) == RETURN_EXPR
2337 || TREE_CODE (t) == RESX_EXPR);
2338 }
2339
2340
2341 /* Return true if T is a statement that may alter the flow of control
2342 (e.g., a call to a non-returning function). */
2343
2344 bool
2345 is_ctrl_altering_stmt (tree t)
2346 {
2347 tree call;
2348
2349 gcc_assert (t);
2350 call = get_call_expr_in (t);
2351 if (call)
2352 {
2353 /* A non-pure/const CALL_EXPR alters flow control if the current
2354 function has nonlocal labels. */
2355 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2356 return true;
2357
2358 /* A CALL_EXPR also alters control flow if it does not return. */
2359 if (call_expr_flags (call) & (ECF_NORETURN | ECF_LONGJMP))
2360 return true;
2361 }
2362
2363 /* If a statement can throw, it alters control flow. */
2364 return tree_can_throw_internal (t);
2365 }
2366
2367
2368 /* Return true if T is a computed goto. */
2369
2370 bool
2371 computed_goto_p (tree t)
2372 {
2373 return (TREE_CODE (t) == GOTO_EXPR
2374 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2375 }
2376
2377
2378 /* Checks whether EXPR is a simple local goto. */
2379
2380 bool
2381 simple_goto_p (tree expr)
2382 {
2383 return (TREE_CODE (expr) == GOTO_EXPR
2384 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2385 }
2386
2387
2388 /* Return true if T should start a new basic block. PREV_T is the
2389 statement preceding T. It is used when T is a label or a case label.
2390 Labels should only start a new basic block if their previous statement
2391 wasn't a label. Otherwise, sequence of labels would generate
2392 unnecessary basic blocks that only contain a single label. */
2393
2394 static inline bool
2395 stmt_starts_bb_p (tree t, tree prev_t)
2396 {
2397 enum tree_code code;
2398
2399 if (t == NULL_TREE)
2400 return false;
2401
2402 /* LABEL_EXPRs start a new basic block only if the preceding
2403 statement wasn't a label of the same type. This prevents the
2404 creation of consecutive blocks that have nothing but a single
2405 label. */
2406 code = TREE_CODE (t);
2407 if (code == LABEL_EXPR)
2408 {
2409 /* Nonlocal and computed GOTO targets always start a new block. */
2410 if (code == LABEL_EXPR
2411 && (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2412 || FORCED_LABEL (LABEL_EXPR_LABEL (t))))
2413 return true;
2414
2415 if (prev_t && TREE_CODE (prev_t) == code)
2416 {
2417 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2418 return true;
2419
2420 cfg_stats.num_merged_labels++;
2421 return false;
2422 }
2423 else
2424 return true;
2425 }
2426
2427 return false;
2428 }
2429
2430
2431 /* Return true if T should end a basic block. */
2432
2433 bool
2434 stmt_ends_bb_p (tree t)
2435 {
2436 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2437 }
2438
2439
2440 /* Add gotos that used to be represented implicitly in the CFG. */
2441
void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
	     from cfg_remove_useless_stmts here since it violates the
	     invariants for tree--cfg correspondence and thus fits better
	     here where we do it anyway.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      /* Only an edge to the textually-following block can
		 become a fallthru.  */
	      if (e->dest != bb->next_bb)
		continue;

	      /* Empty out the branch arm that jumped to the next block
		 and mark the edge as a fallthru instead.  */
	      if (e->flags & EDGE_TRUE_VALUE)
		COND_EXPR_THEN (stmt) = build_empty_stmt ();
	      else if (e->flags & EDGE_FALSE_VALUE)
		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
	      else
		gcc_unreachable ();
	      e->flags |= EDGE_FALLTHRU;
	    }

	  continue;
	}

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* Remove the RETURN_EXPR if we may fall though to the exit
	     instead.  */
	  gcc_assert (EDGE_COUNT (bb->succs) == 1);
	  gcc_assert (EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR);

	  /* Only a value-less return immediately before the exit block
	     can be deleted.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      && !TREE_OPERAND (stmt, 0))
	    {
	      bsi_remove (&last);
	      EDGE_SUCC (bb, 0)->flags |= EDGE_FALLTHRU;
	    }
	  continue;
	}

      /* There can be no fallthru edge if the last statement is a control
	 one.  */
      if (stmt && is_ctrl_stmt (stmt))
	continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* If the fallthru target is the next block, the implicit edge
	 needs no explicit goto.  */
      if (!e || e->dest == bb->next_bb)
	continue;

      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      /* Materialize the implicit edge as an explicit GOTO_EXPR carrying
	 the location recorded on the edge.  */
      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
2521
2522 /* Remove block annotations and other datastructures. */
2523
void
delete_tree_cfg_annotations (void)
{
  basic_block bb;
  /* Free the per-block annotations first, but only if any blocks
     exist.  */
  if (n_basic_blocks > 0)
    free_blocks_annotations ();

  /* Drop the label map (GC will reclaim it) and the reorder-block
     info pool, then clear the now-dangling per-block rbi pointers.  */
  label_to_block_map = NULL;
  free_rbi_pool ();
  FOR_EACH_BB (bb)
    bb->rbi = NULL;
}
2536
2537
2538 /* Return the first statement in basic block BB. */
2539
2540 tree
2541 first_stmt (basic_block bb)
2542 {
2543 block_stmt_iterator i = bsi_start (bb);
2544 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2545 }
2546
2547
2548 /* Return the last statement in basic block BB. */
2549
2550 tree
2551 last_stmt (basic_block bb)
2552 {
2553 block_stmt_iterator b = bsi_last (bb);
2554 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2555 }
2556
2557
2558 /* Return a pointer to the last statement in block BB. */
2559
2560 tree *
2561 last_stmt_ptr (basic_block bb)
2562 {
2563 block_stmt_iterator last = bsi_last (bb);
2564 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2565 }
2566
2567
2568 /* Return the last statement of an otherwise empty block. Return NULL
2569 if the block is totally empty, or if it contains more than one
2570 statement. */
2571
2572 tree
2573 last_and_only_stmt (basic_block bb)
2574 {
2575 block_stmt_iterator i = bsi_last (bb);
2576 tree last, prev;
2577
2578 if (bsi_end_p (i))
2579 return NULL_TREE;
2580
2581 last = bsi_stmt (i);
2582 bsi_prev (&i);
2583 if (bsi_end_p (i))
2584 return last;
2585
2586 /* Empty statements should no longer appear in the instruction stream.
2587 Everything that might have appeared before should be deleted by
2588 remove_useless_stmts, and the optimizers should just bsi_remove
2589 instead of smashing with build_empty_stmt.
2590
2591 Thus the only thing that should appear here in a block containing
2592 one executable statement is a label. */
2593 prev = bsi_stmt (i);
2594 if (TREE_CODE (prev) == LABEL_EXPR)
2595 return last;
2596 else
2597 return NULL_TREE;
2598 }
2599
2600
2601 /* Mark BB as the basic block holding statement T. */
2602
void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      /* Recurse over every statement in the list.  */
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
	 so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
	{
	  int uid;

	  t = LABEL_EXPR_LABEL (t);
	  uid = LABEL_DECL_UID (t);
	  if (uid == -1)
	    {
	      /* First time this label is seen: assign it a fresh UID and
		 make room for it in the map.
		 NOTE(review): 3 * uid / 2 does not exceed UID when
		 UID < 2; presumably the map's initial allocation always
		 covers small UIDs -- confirm at the map's creation
		 site.  */
	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
	      if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
		VARRAY_GROW (label_to_block_map, 3 * uid / 2);
	    }
	  else
	    /* We're moving an existing label.  Make sure that we've
	       removed it from the old block.  */
	    gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
	  VARRAY_BB (label_to_block_map, uid) = bb;
	}
    }
}
2641
2642 /* Finds iterator for STMT. */
2643
2644 extern block_stmt_iterator
2645 bsi_for_stmt (tree stmt)
2646 {
2647 block_stmt_iterator bsi;
2648
2649 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2650 if (bsi_stmt (bsi) == stmt)
2651 return bsi;
2652
2653 gcc_unreachable ();
2654 }
2655
2656 /* Insert statement (or statement list) T before the statement
2657 pointed-to by iterator I. M specifies how to update iterator I
2658 after insertion (see enum bsi_iterator_update). */
2659
void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Record T's block (which also registers any labels in T in the
     label map), link T into the statement chain, and mark it as
     modified.  */
  set_bb_for_stmt (t, i->bb);
  tsi_link_before (&i->tsi, t, m);
  modify_stmt (t);
}
2667
2668
2669 /* Insert statement (or statement list) T after the statement
2670 pointed-to by iterator I. M specifies how to update iterator I
2671 after insertion (see enum bsi_iterator_update). */
2672
void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Record T's block (which also registers any labels in T in the
     label map), link T into the statement chain, and mark it as
     modified.  */
  set_bb_for_stmt (t, i->bb);
  tsi_link_after (&i->tsi, t, m);
  modify_stmt (t);
}
2680
2681
2682 /* Remove the statement pointed to by iterator I. The iterator is updated
2683 to the next statement. */
2684
void
bsi_remove (block_stmt_iterator *i)
{
  /* Fetch the statement before delinking it.  Clearing its block also
     clears its entry in the label map if it is a label.  */
  tree t = bsi_stmt (*i);
  set_bb_for_stmt (t, NULL);
  tsi_delink (&i->tsi);
}
2692
2693
2694 /* Move the statement at FROM so it comes right after the statement at TO. */
2695
void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
  /* Grab the statement before removal invalidates FROM's position,
     then re-insert it after TO.  */
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_after (to, stmt, BSI_SAME_STMT);
}
2703
2704
2705 /* Move the statement at FROM so it comes right before the statement at TO. */
2706
void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  /* Grab the statement before removal invalidates FROM's position,
     then re-insert it before TO.  */
  tree stmt = bsi_stmt (*from);
  bsi_remove (from);
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}
2714
2715
2716 /* Move the statement at FROM to the end of basic block BB. */
2717
2718 void
2719 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2720 {
2721 block_stmt_iterator last = bsi_last (bb);
2722
2723 /* Have to check bsi_end_p because it could be an empty block. */
2724 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2725 bsi_move_before (from, &last);
2726 else
2727 bsi_move_after (from, &last);
2728 }
2729
2730
2731 /* Replace the contents of the statement pointed to by iterator BSI
2732 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2733 information of the original statement is preserved. */
2734
void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  /* Carry over the source location of the statement being replaced and
     record STMT's block.  */
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (preserve_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
	add_stmt_to_eh_region (stmt, eh_region);
    }

  /* Splice STMT into place and note that it changed.  */
  *bsi_stmt_ptr (*bsi) = stmt;
  modify_stmt (stmt);
}
2756
2757
2758 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2759 is made to place the statement in an existing basic block, but
2760 sometimes that isn't possible. When it isn't possible, the edge is
2761 split and the statement is added to the new block.
2762
2763 In all cases, the returned *BSI points to the correct location. The
2764 return value is true if insertion should be done after the location,
2765 or false if it should be done before the location. If new basic block
2766 has to be created, it is stored in *NEW_BB. */
2767
static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
			   basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (EDGE_COUNT (dest->preds) == 1
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
	return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
	{
	  bsi_next (bsi);
	  if (bsi_end_p (*bsi))
	    break;
	  tmp = bsi_stmt (*bsi);
	}

      /* The block contained only labels: insert after the last one.  */
      if (bsi_end_p (*bsi))
	{
	  *bsi = bsi_last (dest);
	  return true;
	}
      else
	return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && EDGE_COUNT (src->succs) == 1
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
	return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
	return true;

      /* Insert code just before returning the value.  We may need to decompose
         the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
        {
	  tree op = TREE_OPERAND (tmp, 0);
	  if (!is_gimple_val (op))
	    {
	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
	    }
	  /* Back up so the caller inserts before the RETURN_EXPR.  */
	  bsi_prev (bsi);
	  return true;
        }
    }

  /* Otherwise, create a new basic block, and split this edge.  The new
     block has a single predecessor, so the first case above will apply
     after the restart.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = EDGE_PRED (dest, 0);
  goto restart;
}
2851
2852
2853 /* This routine will commit all pending edge insertions, creating any new
2854 basic blocks which are necessary.
2855
2856 If specified, NEW_BLOCKS returns a count of the number of new basic
2857 blocks which were created. */
2858
2859 void
2860 bsi_commit_edge_inserts (int *new_blocks)
2861 {
2862 basic_block bb;
2863 edge e;
2864 int blocks;
2865 edge_iterator ei;
2866
2867 blocks = n_basic_blocks;
2868
2869 bsi_commit_one_edge_insert (EDGE_SUCC (ENTRY_BLOCK_PTR, 0), NULL);
2870
2871 FOR_EACH_BB (bb)
2872 FOR_EACH_EDGE (e, ei, bb->succs)
2873 bsi_commit_one_edge_insert (e, NULL);
2874
2875 if (new_blocks)
2876 *new_blocks = n_basic_blocks - blocks;
2877 }
2878
2879
2880 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2881 to this block, otherwise set it to NULL. */
2882
2883 void
2884 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2885 {
2886 if (new_bb)
2887 *new_bb = NULL;
2888 if (PENDING_STMT (e))
2889 {
2890 block_stmt_iterator bsi;
2891 tree stmt = PENDING_STMT (e);
2892
2893 PENDING_STMT (e) = NULL_TREE;
2894
2895 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
2896 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2897 else
2898 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2899 }
2900 }
2901
2902
2903 /* Add STMT to the pending list of edge E. No actual insertion is
2904 made until a call to bsi_commit_edge_inserts () is made. */
2905
void
bsi_insert_on_edge (edge e, tree stmt)
{
  /* Queue STMT on E's pending list; the actual insertion is done later
     by bsi_commit_edge_inserts/bsi_commit_one_edge_insert.  */
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
2911
2912 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If new block has to
2913 be created, it is returned. */
2914
2915 basic_block
2916 bsi_insert_on_edge_immediate (edge e, tree stmt)
2917 {
2918 block_stmt_iterator bsi;
2919 basic_block new_bb = NULL;
2920
2921 gcc_assert (!PENDING_STMT (e));
2922
2923 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
2924 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2925 else
2926 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2927
2928 return new_bb;
2929 }
2930
2931 /*---------------------------------------------------------------------------
2932 Tree specific functions for CFG manipulation
2933 ---------------------------------------------------------------------------*/
2934
2935 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2936 Abort on abnormal edges. */
2937
static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest, src;
  edge new_edge, e;
  tree phi;
  int i, num_elem;
  edge_iterator ei;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  src = edge_in->src;
  dest = edge_in->dest;

  /* Place the new block in the block list.  Try to keep the new block
     near its "logical" location.  This is of most help to humans looking
     at debugging dumps.  */
  FOR_EACH_EDGE (e, ei, dest->preds)
    if (e->src->next_bb == dest)
      break;
  if (!e)
    after_bb = dest->prev_bb;
  else
    after_bb = edge_in->src;

  /* The new block inherits the frequency and count of the edge being
     split; its single outgoing edge to DEST is always taken.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Find all the PHI arguments on the original edge, and change them to
     the new edge.  Do it before redirection, so that the argument does not
     get removed.  */
  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
    {
      num_elem = PHI_NUM_ARGS (phi);
      for (i = 0; i < num_elem; i++)
	if (PHI_ARG_EDGE (phi, i) == edge_in)
	  {
	    PHI_ARG_EDGE (phi, i) = new_edge;
	    break;
	  }
    }

  /* Finally point the original edge at the new block.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e);
  gcc_assert (!PENDING_STMT (edge_in));

  return new_bb;
}
2991
2992
2993 /* Return true when BB has label LABEL in it. */
2994
2995 static bool
2996 has_label_p (basic_block bb, tree label)
2997 {
2998 block_stmt_iterator bsi;
2999
3000 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3001 {
3002 tree stmt = bsi_stmt (bsi);
3003
3004 if (TREE_CODE (stmt) != LABEL_EXPR)
3005 return false;
3006 if (LABEL_EXPR_LABEL (stmt) == label)
3007 return true;
3008 }
3009 return false;
3010 }
3011
3012
3013 /* Callback for walk_tree, check that all elements with address taken are
3014 properly noticed as such. */
3015
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types need no checking; do not walk into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not. 
     We check for constants explicitly since they are not considered
     gimple invariants if they overflowed.  */
#define CHECK_OP(N, MSG) \
  do { if (!CONSTANT_CLASS_P (TREE_OPERAND (t, N))		\
         && !is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* Stores through BIT_FIELD_REF to a GIMPLE register are not
	 valid GIMPLE.  */
      x = TREE_OPERAND (t, 0);
      if (TREE_CODE (x) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (x, 0)))
	{
	  error ("GIMPLE register modified with BIT_FIELD_REF");
	  return t;
	}
      break;

    case ADDR_EXPR:
      /* Skip any references (they will be checked when we recurse down the
	 tree) and ensure that any variable used as a prefix is marked
	 addressable.  */
      for (x = TREE_OPERAND (t, 0);
	   (handled_component_p (x)
	    || TREE_CODE (x) == REALPART_EXPR
	    || TREE_CODE (x) == IMAGPART_EXPR);
	   x = TREE_OPERAND (x, 0))
	;

      if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
	return NULL;
      if (!TREE_ADDRESSABLE (x))
	{
	  error ("address taken, but ADDRESSABLE bit not set");
	  return x;
	}
      break;

    case COND_EXPR:
      x = TREE_OPERAND (t, 0);
      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
	{
	  error ("non-boolean used in condition");
	  return x;
	}
      break;

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "Invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (TREE_CODE (t) == REALPART_EXPR || TREE_CODE (t) == IMAGPART_EXPR
	     || handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "Invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "Invalid array index.");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "Invalid array lower bound.");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "Invalid array stride.");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      CHECK_OP (1, "Invalid operand to BIT_FIELD_REF");
	      CHECK_OP (2, "Invalid operand to BIT_FIELD_REF");
	    }

	  t = TREE_OPERAND (t, 0);
	}

      /* T is now the base of the reference chain.  */
      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
	{
	  error ("Invalid reference prefix.");
	  return t;
	}
      *walk_subtrees = 0;
      break;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "Invalid operand to binary operator");
      CHECK_OP (1, "Invalid operand to binary operator");
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3185
3186
3187 /* Verify STMT, return true if STMT is not in GIMPLE form.
3188 TODO: Implement type checking. */
3189
static bool
verify_stmt (tree stmt, bool last_in_block)
{
  tree addr;

  if (!is_gimple_stmt (stmt))
    {
      error ("Is not a valid GIMPLE statement.");
      goto fail;
    }

  /* verify_expr returns the offending subtree, or NULL on success.  */
  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
  if (addr)
    {
      debug_generic_stmt (addr);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  if (lookup_stmt_eh_region (stmt) >= 0)
    {
      if (!tree_could_throw_p (stmt))
	{
	  error ("Statement marked for throw, but doesn%'t.");
	  goto fail;
	}
      /* An internally-throwing statement must end its block.  */
      if (!last_in_block && tree_can_throw_internal (stmt))
	{
	  error ("Statement marked for throw in middle of block.");
	  goto fail;
	}
    }

  return false;

 fail:
  debug_generic_stmt (stmt);
  return true;
}
3233
3234
3235 /* Return true when the T can be shared. */
3236
static bool
tree_node_can_be_shared (tree t)
{
  /* Types, declarations, constants, invariants and SSA names are
     always shareable.  */
  if (IS_TYPE_OR_DECL_P (t)
      /* We check for constants explicitly since they are not considered
	 gimple invariants if they overflowed.  */
      || CONSTANT_CLASS_P (t)
      || is_gimple_min_invariant (t)
      || TREE_CODE (t) == SSA_NAME)
    return true;

  /* Strip a chain of reference nodes: array references with constant or
     invariant indices, component references, and real/imaginary part
     accesses.  */
  while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	  /* We check for constants explicitly since they are not considered
	     gimple invariants if they overflowed.  */
	  && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
	      || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
	 || (TREE_CODE (t) == COMPONENT_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR))
    t = TREE_OPERAND (t, 0);

  /* A reference chain rooted at a declaration is shareable.  */
  if (DECL_P (t))
    return true;

  return false;
}
3263
3264
3265 /* Called via walk_trees. Verify tree sharing. */
3266
3267 static tree
3268 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3269 {
3270 htab_t htab = (htab_t) data;
3271 void **slot;
3272
3273 if (tree_node_can_be_shared (*tp))
3274 {
3275 *walk_subtrees = false;
3276 return NULL;
3277 }
3278
3279 slot = htab_find_slot (htab, *tp, INSERT);
3280 if (*slot)
3281 return *slot;
3282 *slot = *tp;
3283
3284 return NULL;
3285 }
3286
3287
3288 /* Verify the GIMPLE statement chain. */
3289
void
verify_stmts (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool err = false;
  htab_t htab;
  tree addr;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* Pointer-keyed table used to detect improper tree node sharing
     across the whole function.  */
  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);

  FOR_EACH_BB (bb)
    {
      tree phi;
      int i;

      /* Check every PHI argument for GIMPLE validity and sharing.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int phi_num_args = PHI_NUM_ARGS (phi);

	  for (i = 0; i < phi_num_args; i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      tree addr;

	      /* Addressable variables do have SSA_NAMEs but they
		 are not considered gimple values.  */
	      if (TREE_CODE (t) != SSA_NAME
		  && TREE_CODE (t) != FUNCTION_DECL
		  && !is_gimple_val (t))
		{
		  error ("PHI def is not a GIMPLE value");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (t);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_expr, NULL, NULL);
	      if (addr)
		{
		  debug_generic_stmt (addr);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_node_sharing, htab, NULL);
	      if (addr)
		{
		  error ("Incorrect sharing of tree nodes");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (addr);
		  err |= true;
		}
	    }
	}

      /* Check every statement; advance the iterator before verifying
	 so bsi_end_p tells verify_stmt whether STMT ends the block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);
	  bsi_next (&bsi);
	  err |= verify_stmt (stmt, bsi_end_p (bsi));
	  addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
	  if (addr)
	    {
	      error ("Incorrect sharing of tree nodes");
	      debug_generic_stmt (stmt);
	      debug_generic_stmt (addr);
	      err |= true;
	    }
	}
    }

  if (err)
    internal_error ("verify_stmts failed.");

  htab_delete (htab);
  timevar_pop (TV_TREE_STMT_VERIFY);
}
3368
3369
3370 /* Verifies that the flow information is OK. */
3371
3372 static int
3373 tree_verify_flow_info (void)
3374 {
3375 int err = 0;
3376 basic_block bb;
3377 block_stmt_iterator bsi;
3378 tree stmt;
3379 edge e;
3380 edge_iterator ei;
3381
3382 if (ENTRY_BLOCK_PTR->stmt_list)
3383 {
3384 error ("ENTRY_BLOCK has a statement list associated with it\n");
3385 err = 1;
3386 }
3387
3388 if (EXIT_BLOCK_PTR->stmt_list)
3389 {
3390 error ("EXIT_BLOCK has a statement list associated with it\n");
3391 err = 1;
3392 }
3393
3394 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3395 if (e->flags & EDGE_FALLTHRU)
3396 {
3397 error ("Fallthru to exit from bb %d\n", e->src->index);
3398 err = 1;
3399 }
3400
3401 FOR_EACH_BB (bb)
3402 {
3403 bool found_ctrl_stmt = false;
3404
3405 /* Skip labels on the start of basic block. */
3406 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3407 {
3408 if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR)
3409 break;
3410
3411 if (label_to_block (LABEL_EXPR_LABEL (bsi_stmt (bsi))) != bb)
3412 {
3413 tree stmt = bsi_stmt (bsi);
3414 error ("Label %s to block does not match in bb %d\n",
3415 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3416 bb->index);
3417 err = 1;
3418 }
3419
3420 if (decl_function_context (LABEL_EXPR_LABEL (bsi_stmt (bsi)))
3421 != current_function_decl)
3422 {
3423 tree stmt = bsi_stmt (bsi);
3424 error ("Label %s has incorrect context in bb %d\n",
3425 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3426 bb->index);
3427 err = 1;
3428 }
3429 }
3430
3431 /* Verify that body of basic block BB is free of control flow. */
3432 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3433 {
3434 tree stmt = bsi_stmt (bsi);
3435
3436 if (found_ctrl_stmt)
3437 {
3438 error ("Control flow in the middle of basic block %d\n",
3439 bb->index);
3440 err = 1;
3441 }
3442
3443 if (stmt_ends_bb_p (stmt))
3444 found_ctrl_stmt = true;
3445
3446 if (TREE_CODE (stmt) == LABEL_EXPR)
3447 {
3448 error ("Label %s in the middle of basic block %d\n",
3449 IDENTIFIER_POINTER (DECL_NAME (stmt)),
3450 bb->index);
3451 err = 1;
3452 }
3453 }
3454 bsi = bsi_last (bb);
3455 if (bsi_end_p (bsi))
3456 continue;
3457
3458 stmt = bsi_stmt (bsi);
3459
3460 if (is_ctrl_stmt (stmt))
3461 {
3462 FOR_EACH_EDGE (e, ei, bb->succs)
3463 if (e->flags & EDGE_FALLTHRU)
3464 {
3465 error ("Fallthru edge after a control statement in bb %d \n",
3466 bb->index);
3467 err = 1;
3468 }
3469 }
3470
3471 switch (TREE_CODE (stmt))
3472 {
3473 case COND_EXPR:
3474 {
3475 edge true_edge;
3476 edge false_edge;
3477 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3478 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3479 {
3480 error ("Structured COND_EXPR at the end of bb %d\n", bb->index);
3481 err = 1;
3482 }
3483
3484 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3485
3486 if (!true_edge || !false_edge
3487 || !(true_edge->flags & EDGE_TRUE_VALUE)
3488 || !(false_edge->flags & EDGE_FALSE_VALUE)
3489 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3490 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3491 || EDGE_COUNT (bb->succs) >= 3)
3492 {
3493 error ("Wrong outgoing edge flags at end of bb %d\n",
3494 bb->index);
3495 err = 1;
3496 }
3497
3498 if (!has_label_p (true_edge->dest,
3499 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3500 {
3501 error ("%<then%> label does not match edge at end of bb %d\n",
3502 bb->index);
3503 err = 1;
3504 }
3505
3506 if (!has_label_p (false_edge->dest,
3507 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3508 {
3509 error ("%<else%> label does not match edge at end of bb %d\n",
3510 bb->index);
3511 err = 1;
3512 }
3513 }
3514 break;
3515
3516 case GOTO_EXPR:
3517 if (simple_goto_p (stmt))
3518 {
3519 error ("Explicit goto at end of bb %d\n", bb->index);
3520 err = 1;
3521 }
3522 else
3523 {
3524 /* FIXME. We should double check that the labels in the
3525 destination blocks have their address taken. */
3526 FOR_EACH_EDGE (e, ei, bb->succs)
3527 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3528 | EDGE_FALSE_VALUE))
3529 || !(e->flags & EDGE_ABNORMAL))
3530 {
3531 error ("Wrong outgoing edge flags at end of bb %d\n",
3532 bb->index);
3533 err = 1;
3534 }
3535 }
3536 break;
3537
3538 case RETURN_EXPR:
3539 if (EDGE_COUNT (bb->succs) != 1
3540 || (EDGE_SUCC (bb, 0)->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3541 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3542 {
3543 error ("Wrong outgoing edge flags at end of bb %d\n", bb->index);
3544 err = 1;
3545 }
3546 if (EDGE_SUCC (bb, 0)->dest != EXIT_BLOCK_PTR)
3547 {
3548 error ("Return edge does not point to exit in bb %d\n",
3549 bb->index);
3550 err = 1;
3551 }
3552 break;
3553
3554 case SWITCH_EXPR:
3555 {
3556 tree prev;
3557 edge e;
3558 size_t i, n;
3559 tree vec;
3560
3561 vec = SWITCH_LABELS (stmt);
3562 n = TREE_VEC_LENGTH (vec);
3563
3564 /* Mark all the destination basic blocks. */
3565 for (i = 0; i < n; ++i)
3566 {
3567 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3568 basic_block label_bb = label_to_block (lab);
3569
3570 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3571 label_bb->aux = (void *)1;
3572 }
3573
3574 /* Verify that the case labels are sorted. */
3575 prev = TREE_VEC_ELT (vec, 0);
3576 for (i = 1; i < n - 1; ++i)
3577 {
3578 tree c = TREE_VEC_ELT (vec, i);
3579 if (! CASE_LOW (c))
3580 {
3581 error ("Found default case not at end of case vector");
3582 err = 1;
3583 continue;
3584 }
3585 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3586 {
3587 error ("Case labels not sorted:\n ");
3588 print_generic_expr (stderr, prev, 0);
3589 fprintf (stderr," is greater than ");
3590 print_generic_expr (stderr, c, 0);
3591 fprintf (stderr," but comes before it.\n");
3592 err = 1;
3593 }
3594 prev = c;
3595 }
3596 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3597 {
3598 error ("No default case found at end of case vector");
3599 err = 1;
3600 }
3601
3602 FOR_EACH_EDGE (e, ei, bb->succs)
3603 {
3604 if (!e->dest->aux)
3605 {
3606 error ("Extra outgoing edge %d->%d\n",
3607 bb->index, e->dest->index);
3608 err = 1;
3609 }
3610 e->dest->aux = (void *)2;
3611 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3612 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3613 {
3614 error ("Wrong outgoing edge flags at end of bb %d\n",
3615 bb->index);
3616 err = 1;
3617 }
3618 }
3619
3620 /* Check that we have all of them. */
3621 for (i = 0; i < n; ++i)
3622 {
3623 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3624 basic_block label_bb = label_to_block (lab);
3625
3626 if (label_bb->aux != (void *)2)
3627 {
3628 error ("Missing edge %i->%i\n",
3629 bb->index, label_bb->index);
3630 err = 1;
3631 }
3632 }
3633
3634 FOR_EACH_EDGE (e, ei, bb->succs)
3635 e->dest->aux = (void *)0;
3636 }
3637
3638 default: ;
3639 }
3640 }
3641
3642 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3643 verify_dominators (CDI_DOMINATORS);
3644
3645 return err;
3646 }
3647
3648
3649 /* Updates phi nodes after creating a forwarder block joined
3650 by edge FALLTHRU. */
3651
static void
tree_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree phi, new_phi, var, prev, next;

  /* DUMMY is the new forwarder block; BB is the block it falls into.  */
  dummy = fallthru->src;
  bb = fallthru->dest;

  /* If BB has only the fallthru predecessor, no edges were redirected
     into DUMMY, so no PHI adjustment is needed.  */
  if (EDGE_COUNT (bb->preds) == 1)
    return;

  /* If we redirected a branch we must create new phi nodes at the
     start of BB.  For each PHI in DUMMY, create a PHI in BB for the
     same variable, give DUMMY's PHI a fresh SSA name as its result,
     and feed that name into BB's PHI along the fallthru edge.  */
  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
    {
      var = PHI_RESULT (phi);
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (&new_phi, PHI_RESULT (phi), fallthru);
    }

  /* Ensure that the PHI node chain is in the same order.
     create_phi_node prepends, so reverse the chain in place.  */
  prev = NULL;
  for (phi = phi_nodes (bb); phi; phi = next)
    {
      next = PHI_CHAIN (phi);
      PHI_CHAIN (phi) = prev;
      prev = phi;
    }
  set_phi_nodes (bb, prev);

  /* Add the arguments we have stored on edges.  The fallthru edge
     already received its arguments in the loop above.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
3696
3697
3698 /* Return true if basic block BB does nothing except pass control
3699 flow to another block and that we can safely insert a label at
3700 the start of the successor block.
3701
3702 As a precondition, we require that BB be not equal to
3703 ENTRY_BLOCK_PTR. */
3704
3705 static bool
3706 tree_forwarder_block_p (basic_block bb)
3707 {
3708 block_stmt_iterator bsi;
3709 edge e;
3710 edge_iterator ei;
3711
3712 /* BB must have a single outgoing edge. */
3713 if (EDGE_COUNT (bb->succs) != 1
3714 /* BB can not have any PHI nodes. This could potentially be
3715 relaxed early in compilation if we re-rewrote the variables
3716 appearing in any PHI nodes in forwarder blocks. */
3717 || phi_nodes (bb)
3718 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
3719 || EDGE_SUCC (bb, 0)->dest == EXIT_BLOCK_PTR
3720 /* BB may not have an abnormal outgoing edge. */
3721 || (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
3722 return false;
3723
3724 #if ENABLE_CHECKING
3725 gcc_assert (bb != ENTRY_BLOCK_PTR);
3726 #endif
3727
3728 /* Successors of the entry block are not forwarders. */
3729 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3730 if (e->dest == bb)
3731 return false;
3732
3733 /* Now walk through the statements. We can ignore labels, anything else
3734 means this is not a forwarder block. */
3735 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3736 {
3737 tree stmt = bsi_stmt (bsi);
3738
3739 switch (TREE_CODE (stmt))
3740 {
3741 case LABEL_EXPR:
3742 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3743 return false;
3744 break;
3745
3746 default:
3747 return false;
3748 }
3749 }
3750
3751 return true;
3752 }
3753
3754 /* Thread jumps from BB. */
3755
static bool
thread_jumps_from_bb (basic_block bb)
{
  edge_iterator ei;
  edge e;
  bool retval = false;

  /* Examine each of our block's successors to see if it is
     forwardable.  Note that the loop advances EI manually: after a
     successful redirection the edge at the current index is
     re-examined instead of being skipped.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      int freq;
      gcov_type count;
      edge last, old;
      basic_block dest, tmp, curr, old_dest;
      tree phi;
      int arg;

      /* If the edge is abnormal or its destination is not
	 forwardable, then there's nothing to do.  */
      if ((e->flags & EDGE_ABNORMAL)
	  || !bb_ann (e->dest)->forwardable)
	{
	  ei_next (&ei);
	  continue;
	}

      /* Now walk through as many forwarder blocks as possible to find
	 the ultimate destination we want to thread our jump to.
	 Clearing each block's forwardable flag while walking doubles
	 as cycle protection: a loop of forwarders terminates the walk.
	 LAST tracks the edge entering DEST.  */
      last = EDGE_SUCC (e->dest, 0);
      bb_ann (e->dest)->forwardable = 0;
      for (dest = EDGE_SUCC (e->dest, 0)->dest;
	   bb_ann (dest)->forwardable;
	   last = EDGE_SUCC (dest, 0),
	   dest = EDGE_SUCC (dest, 0)->dest)
	bb_ann (dest)->forwardable = 0;

      /* Reset the forwardable marks to 1.  */
      for (tmp = e->dest;
	   tmp != dest;
	   tmp = EDGE_SUCC (tmp, 0)->dest)
	bb_ann (tmp)->forwardable = 1;

      /* The walk did not get past E's destination; nothing to thread.  */
      if (dest == e->dest)
	{
	  ei_next (&ei);
	  continue;
	}

      old = find_edge (bb, dest);
      if (old)
	{
	  /* If there already is an edge, check whether the values in
	     phi nodes differ.  */
	  if (!phi_alternatives_equal (dest, last, old))
	    {
	      /* The previous block is forwarder.  Redirect our jump
		 to that target instead since we know it has no PHI
		 nodes that will need updating.  */
	      dest = last->src;

	      /* That might mean that no forwarding at all is
		 possible.  */
	      if (dest == e->dest)
		{
		  ei_next (&ei);
		  continue;
		}

	      old = find_edge (bb, dest);
	    }
	}

      /* Perform the redirection.  Capture the edge's count and
	 frequency first; they are needed to fix the profile of the
	 blocks we threaded over.  */
      retval = true;
      count = e->count;
      freq = EDGE_FREQUENCY (e);
      old_dest = e->dest;
      e = redirect_edge_and_branch (e, dest);

      /* Update the profile.  Subtract the redirected flow from every
	 block (and its outgoing edge) on the bypassed path, clamping
	 at zero.  */
      if (profile_status != PROFILE_ABSENT)
	for (curr = old_dest;
	     curr != dest;
	     curr = EDGE_SUCC (curr, 0)->dest)
	  {
	    curr->frequency -= freq;
	    if (curr->frequency < 0)
	      curr->frequency = 0;
	    curr->count -= count;
	    if (curr->count < 0)
	      curr->count = 0;
	    EDGE_SUCC (curr, 0)->count -= count;
	    if (EDGE_SUCC (curr, 0)->count < 0)
	      EDGE_SUCC (curr, 0)->count = 0;
	  }

      if (!old)
	{
	  /* Update PHI nodes.   We know that the new argument should
	     have the same value as the argument associated with LAST.
	     Otherwise we would have changed our target block
	     above.  */
	  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
	    {
	      arg = phi_arg_from_edge (phi, last);
	      gcc_assert (arg >= 0);
	      add_phi_arg (&phi, PHI_ARG_DEF (phi, arg), e);
	    }
	}

      /* Remove the unreachable blocks (observe that if all blocks
	 were reachable before, only those in the path we threaded
	 over and did not have any predecessor outside of the path
	 become unreachable).  */
      for (; old_dest != dest; old_dest = tmp)
	{
	  tmp = EDGE_SUCC (old_dest, 0)->dest;

	  if (EDGE_COUNT (old_dest->preds) > 0)
	    break;

	  delete_basic_block (old_dest);
	}

      /* Update the dominators.  */
      if (dom_info_available_p (CDI_DOMINATORS))
	{
	  /* If the dominator of the destination was in the
	     path, set its dominator to the start of the
	     redirected edge.  */
	  if (get_immediate_dominator (CDI_DOMINATORS, old_dest) == NULL)
	    set_immediate_dominator (CDI_DOMINATORS, old_dest, bb);

	  /* Now proceed like if we forwarded just over one edge at a
	     time.  Algorithm for forwarding edge S --> A over
	     edge A --> B then is

	     if (idom (B) == A
		 && !dominated_by (S, B))
	       idom (B) = idom (A);
	     recount_idom (A);  */

	  for (; old_dest != dest; old_dest = tmp)
	    {
	      basic_block dom;

	      tmp = EDGE_SUCC (old_dest, 0)->dest;

	      if (get_immediate_dominator (CDI_DOMINATORS, tmp) == old_dest
		  && !dominated_by_p (CDI_DOMINATORS, bb, tmp))
		{
		  dom = get_immediate_dominator (CDI_DOMINATORS, old_dest);
		  set_immediate_dominator (CDI_DOMINATORS, tmp, dom);
		}

	      dom = recount_dominator (CDI_DOMINATORS, old_dest);
	      set_immediate_dominator (CDI_DOMINATORS, old_dest, dom);
	    }
	}
    }

  return retval;
}
3920
3921
3922 /* Thread jumps over empty statements.
3923
3924 This code should _not_ thread over obviously equivalent conditions
3925 as that requires nontrivial updates to the SSA graph.
3926
3927 As a precondition, we require that all basic blocks be reachable.
3928 That is, there should be no opportunities left for
3929 delete_unreachable_blocks. */
3930
static bool
thread_jumps (void)
{
  basic_block bb;
  bool retval = false;
  /* WORKLIST holds blocks to thread jumps from; CURRENT points one
     past the last used element, so the stack is drained LIFO.  A
     block can appear at most once (guarded by BB_VISITED), so
     LAST_BASIC_BLOCK entries suffice.  */
  basic_block *worklist = xmalloc (sizeof (basic_block) * last_basic_block);
  basic_block *current = worklist;

  /* Pre-compute forwardability of every block and clear the visited
     marks that track worklist membership below.  */
  FOR_EACH_BB (bb)
    {
      bb_ann (bb)->forwardable = tree_forwarder_block_p (bb);
      bb->flags &= ~BB_VISITED;
    }

  /* We pretend to have ENTRY_BLOCK_PTR in WORKLIST.  This way,
     ENTRY_BLOCK_PTR will never be entered into WORKLIST.  */
  ENTRY_BLOCK_PTR->flags |= BB_VISITED;

  /* Initialize WORKLIST by putting non-forwarder blocks that
     immediately precede forwarder blocks because those are the ones
     that we know we can thread jumps from.  We use BB_VISITED to
     indicate whether a given basic block is in WORKLIST or not,
     thereby avoiding duplicates in WORKLIST.  */
  FOR_EACH_BB (bb)
    {
      edge_iterator ei;
      edge e;

      /* We are not interested in finding non-forwarder blocks
	 directly.  We want to find non-forwarder blocks as
	 predecessors of a forwarder block.  */
      if (!bb_ann (bb)->forwardable)
	continue;

      /* Now we know BB is a forwarder block.  Visit each of its
	 incoming edges and add to WORKLIST all non-forwarder blocks
	 among BB's predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  /* We don't want to put a duplicate into WORKLIST.  */
	  if ((e->src->flags & BB_VISITED) == 0
	      /* We are not interested in threading jumps from a forwarder
		 block.  */
	      && !bb_ann (e->src)->forwardable)
	    {
	      e->src->flags |= BB_VISITED;
	      *current++ = e->src;
	    }
	}
    }

  /* Now let's drain WORKLIST.  */
  while (worklist != current)
    {
      bb = *--current;

      /* BB is no longer in WORKLIST, so clear BB_VISITED.  */
      bb->flags &= ~BB_VISITED;

      if (thread_jumps_from_bb (bb))
	{
	  retval = true;

	  if (tree_forwarder_block_p (bb))
	    {
	      edge_iterator ej;
	      edge f;

	      bb_ann (bb)->forwardable = true;

	      /* Attempts to thread through BB may have been blocked
		 because BB was not a forwarder block before.  Now
		 that BB is a forwarder block, we should revisit BB's
		 predecessors.  */
	      FOR_EACH_EDGE (f, ej, bb->preds)
		{
		  /* We don't want to put a duplicate into WORKLIST.  */
		  if ((f->src->flags & BB_VISITED) == 0
		      /* We are not interested in threading jumps from a
			 forwarder block.  */
		      && !bb_ann (f->src)->forwardable)
		    {
		      f->src->flags |= BB_VISITED;
		      *current++ = f->src;
		    }
		}
	    }
	}
    }

  /* Undo the fake membership of the entry block set up above.  */
  ENTRY_BLOCK_PTR->flags &= ~BB_VISITED;

  free (worklist);

  return retval;
}
4027
4028
4029 /* Return a non-special label in the head of basic block BLOCK.
4030 Create one if it doesn't exist. */
4031
4032 tree
4033 tree_block_label (basic_block bb)
4034 {
4035 block_stmt_iterator i, s = bsi_start (bb);
4036 bool first = true;
4037 tree label, stmt;
4038
4039 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4040 {
4041 stmt = bsi_stmt (i);
4042 if (TREE_CODE (stmt) != LABEL_EXPR)
4043 break;
4044 label = LABEL_EXPR_LABEL (stmt);
4045 if (!DECL_NONLOCAL (label))
4046 {
4047 if (!first)
4048 bsi_move_before (&i, &s);
4049 return label;
4050 }
4051 }
4052
4053 label = create_artificial_label ();
4054 stmt = build1 (LABEL_EXPR, void_type_node, label);
4055 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4056 return label;
4057 }
4058
4059
4060 /* Attempt to perform edge redirection by replacing a possibly complex
4061 jump instruction by a goto or by removing the jump completely.
4062 This can apply only if all edges now point to the same block. The
4063 parameters and return values are equivalent to
4064 redirect_edge_and_branch. */
4065
4066 static edge
4067 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4068 {
4069 basic_block src = e->src;
4070 edge tmp;
4071 block_stmt_iterator b;
4072 tree stmt;
4073 edge_iterator ei;
4074
4075 /* Verify that all targets will be TARGET. */
4076 FOR_EACH_EDGE (tmp, ei, src->succs)
4077 if (tmp->dest != target && tmp != e)
4078 break;
4079
4080 if (tmp)
4081 return NULL;
4082
4083 b = bsi_last (src);
4084 if (bsi_end_p (b))
4085 return NULL;
4086 stmt = bsi_stmt (b);
4087
4088 if (TREE_CODE (stmt) == COND_EXPR
4089 || TREE_CODE (stmt) == SWITCH_EXPR)
4090 {
4091 bsi_remove (&b);
4092 e = ssa_redirect_edge (e, target);
4093 e->flags = EDGE_FALLTHRU;
4094 return e;
4095 }
4096
4097 return NULL;
4098 }
4099
4100
4101 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4102 edge representing the redirected branch. */
4103
static edge
tree_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  block_stmt_iterator bsi;
  edge ret;
  tree label, stmt;

  /* Abnormal-call and EH edges cannot be redirected here.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* First try the cheap route: remove the control statement entirely
     if all of BB's successors would end up at DEST anyway.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
    return ret;

  if (e->dest == dest)
    return NULL;

  /* LABEL is a (possibly new) label at the start of DEST that the
     control statement can target.  */
  label = tree_block_label (dest);

  /* The statement that transfers control is the last one in BB (or
     there is none, for a fallthru edge).  */
  bsi = bsi_last (bb);
  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case COND_EXPR:
      /* Retarget the arm of the conditional that E corresponds to.  */
      stmt = (e->flags & EDGE_TRUE_VALUE
	      ? COND_EXPR_THEN (stmt)
	      : COND_EXPR_ELSE (stmt));
      GOTO_DESTINATION (stmt) = label;
      break;

    case GOTO_EXPR:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      {
	/* Retarget every case label that currently leads to E's
	   destination.  */
	tree vec = SWITCH_LABELS (stmt);
	size_t i, n = TREE_VEC_LENGTH (vec);

	for (i = 0; i < n; ++i)
	  {
	    tree elt = TREE_VEC_ELT (vec, i);
	    if (label_to_block (CASE_LABEL (elt)) == e->dest)
	      CASE_LABEL (elt) = label;
	  }
      }
      break;

    case RETURN_EXPR:
      /* Drop the return; the edge becomes a plain fallthru to DEST.  */
      bsi_remove (&bsi);
      e->flags |= EDGE_FALLTHRU;
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
4174
4175
4176 /* Simple wrapper, as we can always redirect fallthru edges. */
4177
4178 static basic_block
4179 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4180 {
4181 e = tree_redirect_edge_and_branch (e, dest);
4182 gcc_assert (e);
4183
4184 return NULL;
4185 }
4186
4187
4188 /* Splits basic block BB after statement STMT (but at least after the
4189 labels). If STMT is NULL, BB is split just after the labels. */
4190
static basic_block
tree_split_block (basic_block bb, void *stmt)
{
  block_stmt_iterator bsi, bsi_tgt;
  tree act;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  NEW_BB takes over BB's successor
     list wholesale.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label is the same as splitting right after the
     label run, so treat it like the STMT == NULL case.  */
  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
    stmt = NULL;

  /* Move everything from BSI to the new basic block.  This loop only
     positions BSI: it skips the leading labels, then stops either
     just past STMT or at the first non-label when STMT is NULL.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      act = bsi_stmt (bsi);
      if (TREE_CODE (act) == LABEL_EXPR)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  bsi_next (&bsi);
	  break;
	}
    }

  /* Transfer the remaining statements (from BSI onward) into NEW_BB,
     preserving their order.  */
  bsi_tgt = bsi_start (new_bb);
  while (!bsi_end_p (bsi))
    {
      act = bsi_stmt (bsi);
      bsi_remove (&bsi);
      bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
    }

  return new_bb;
}
4238
4239
4240 /* Moves basic block BB after block AFTER. */
4241
4242 static bool
4243 tree_move_block_after (basic_block bb, basic_block after)
4244 {
4245 if (bb->prev_bb == after)
4246 return true;
4247
4248 unlink_block (bb);
4249 link_block (bb, after);
4250
4251 return true;
4252 }
4253
4254
4255 /* Return true if basic_block can be duplicated. */
4256
static bool
tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
{
  /* At the tree level, every basic block may be duplicated.  */
  return true;
}
4262
4263 /* Create a duplicate of the basic block BB. NOTE: This does not
4264 preserve SSA form. */
4265
static basic_block
tree_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  block_stmt_iterator bsi, bsi_tgt;
  tree phi, val;
  ssa_op_iter op_iter;

  /* Place the copy at the end of the function, before the exit
     block.  */
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* First copy the phi nodes.  We do not copy phi node arguments here,
     since the edges are not ready yet.  Keep the chain of phi nodes in
     the same order, so that we can add them later.  */
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      /* The copied PHI reuses the same result name, so the original
	 must be marked for rewriting into SSA form again.  */
      mark_for_rewrite (PHI_RESULT (phi));
      create_phi_node (PHI_RESULT (phi), new_bb);
    }
  /* create_phi_node prepends, so reverse to restore BB's order.  */
  set_phi_nodes (new_bb, nreverse (phi_nodes (new_bb)));

  bsi_tgt = bsi_start (new_bb);
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree copy;

      /* Labels are not copied; they stay with the original block.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* Record the definitions.  */
      get_stmt_operands (stmt);

      /* Every name defined here now has two defining statements and
	 must be rewritten.  */
      FOR_EACH_SSA_TREE_OPERAND (val, stmt, op_iter, SSA_OP_ALL_DEFS)
	mark_for_rewrite (val);

      copy = unshare_expr (stmt);

      /* Copy also the virtual operands.  */
      get_stmt_ann (copy);
      copy_virtual_operands (copy, stmt);

      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
    }

  return new_bb;
}
4312
4313 /* Basic block BB_COPY was created by code duplication. Add phi node
4314 arguments for edges going out of BB_COPY. The blocks that were
4315 duplicated have rbi->duplicated set to one. */
4316
4317 void
4318 add_phi_args_after_copy_bb (basic_block bb_copy)
4319 {
4320 basic_block bb, dest;
4321 edge e, e_copy;
4322 edge_iterator ei;
4323 tree phi, phi_copy, phi_next, def;
4324
4325 bb = bb_copy->rbi->original;
4326
4327 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4328 {
4329 if (!phi_nodes (e_copy->dest))
4330 continue;
4331
4332 if (e_copy->dest->rbi->duplicated)
4333 dest = e_copy->dest->rbi->original;
4334 else
4335 dest = e_copy->dest;
4336
4337 e = find_edge (bb, dest);
4338 if (!e)
4339 {
4340 /* During loop unrolling the target of the latch edge is copied.
4341 In this case we are not looking for edge to dest, but to
4342 duplicated block whose original was dest. */
4343 FOR_EACH_EDGE (e, ei, bb->succs)
4344 if (e->dest->rbi->duplicated
4345 && e->dest->rbi->original == dest)
4346 break;
4347
4348 gcc_assert (e != NULL);
4349 }
4350
4351 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4352 phi;
4353 phi = phi_next, phi_copy = TREE_CHAIN (phi_copy))
4354 {
4355 phi_next = TREE_CHAIN (phi);
4356
4357 gcc_assert (PHI_RESULT (phi) == PHI_RESULT (phi_copy));
4358 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4359 add_phi_arg (&phi_copy, def, e_copy);
4360 }
4361 }
4362 }
4363
4364 /* Blocks in REGION_COPY array of length N_REGION were created by
4365 duplication of basic blocks. Add phi node arguments for edges
4366 going from these blocks. */
4367
4368 void
4369 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4370 {
4371 unsigned i;
4372
4373 for (i = 0; i < n_region; i++)
4374 region_copy[i]->rbi->duplicated = 1;
4375
4376 for (i = 0; i < n_region; i++)
4377 add_phi_args_after_copy_bb (region_copy[i]);
4378
4379 for (i = 0; i < n_region; i++)
4380 region_copy[i]->rbi->duplicated = 0;
4381 }
4382
4383 /* Maps the old ssa name FROM_NAME to TO_NAME. */
4384
struct ssa_name_map_entry
{
  /* The SSA name being replaced; serves as the hash key.  */
  tree from_name;

  /* The new SSA name that replaces FROM_NAME.  */
  tree to_name;
};
4390
4391 /* Hash function for ssa_name_map_entry. */
4392
4393 static hashval_t
4394 ssa_name_map_entry_hash (const void *entry)
4395 {
4396 const struct ssa_name_map_entry *en = entry;
4397 return SSA_NAME_VERSION (en->from_name);
4398 }
4399
4400 /* Equality function for ssa_name_map_entry. */
4401
4402 static int
4403 ssa_name_map_entry_eq (const void *in_table, const void *ssa_name)
4404 {
4405 const struct ssa_name_map_entry *en = in_table;
4406
4407 return en->from_name == ssa_name;
4408 }
4409
4410 /* Allocate duplicates of ssa names in list DEFINITIONS and store the mapping
4411 to MAP. */
4412
void
allocate_ssa_names (bitmap definitions, htab_t *map)
{
  tree name;
  struct ssa_name_map_entry *entry;
  PTR *slot;
  unsigned ver;
  bitmap_iterator bi;

  /* Create the map lazily on first use; entries are freed by the
     hash table itself (FREE as the delete function).  */
  if (!*map)
    *map = htab_create (10, ssa_name_map_entry_hash,
			ssa_name_map_entry_eq, free);
  EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
    {
      name = ssa_name (ver);
      slot = htab_find_slot_with_hash (*map, name, SSA_NAME_VERSION (name),
				       INSERT);
      if (*slot)
	entry = *slot;
      else
	{
	  entry = xmalloc (sizeof (struct ssa_name_map_entry));
	  entry->from_name = name;
	  *slot = entry;
	}
      /* Note that TO_NAME is (re)assigned even for a pre-existing
	 entry, so calling this twice yields a fresh duplicate each
	 time.  */
      entry->to_name = duplicate_ssa_name (name, SSA_NAME_DEF_STMT (name));
    }
}
4441
4442 /* Rewrite the definition DEF in statement STMT to new ssa name as specified
4443 by the mapping MAP. */
4444
4445 static void
4446 rewrite_to_new_ssa_names_def (def_operand_p def, tree stmt, htab_t map)
4447 {
4448 tree name = DEF_FROM_PTR (def);
4449 struct ssa_name_map_entry *entry;
4450
4451 gcc_assert (TREE_CODE (name) == SSA_NAME);
4452
4453 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4454 if (!entry)
4455 return;
4456
4457 SET_DEF (def, entry->to_name);
4458 SSA_NAME_DEF_STMT (entry->to_name) = stmt;
4459 }
4460
4461 /* Rewrite the USE to new ssa name as specified by the mapping MAP. */
4462
4463 static void
4464 rewrite_to_new_ssa_names_use (use_operand_p use, htab_t map)
4465 {
4466 tree name = USE_FROM_PTR (use);
4467 struct ssa_name_map_entry *entry;
4468
4469 if (TREE_CODE (name) != SSA_NAME)
4470 return;
4471
4472 entry = htab_find_with_hash (map, name, SSA_NAME_VERSION (name));
4473 if (!entry)
4474 return;
4475
4476 SET_USE (use, entry->to_name);
4477 }
4478
4479 /* Rewrite the ssa names in basic block BB to new ones as specified by the
4480 mapping MAP. */
4481
void
rewrite_to_new_ssa_names_bb (basic_block bb, htab_t map)
{
  unsigned i;
  edge e;
  edge_iterator ei;
  tree phi, stmt;
  block_stmt_iterator bsi;
  use_optype uses;
  vuse_optype vuses;
  def_optype defs;
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  stmt_ann_t ann;

  /* After this scan, E is non-NULL iff BB has an abnormal incoming
     edge; PHI results in such a block must carry the
     occurs-in-abnormal-phi flag.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_ABNORMAL)
      break;

  /* Rewrite the PHI results of BB.  */
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      rewrite_to_new_ssa_names_def (PHI_RESULT_PTR (phi), phi, map);
      if (e)
	SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)) = 1;
    }

  /* Rewrite every operand class of every statement in BB: real uses
     and defs, plus the virtual VUSE/V_MAY_DEF/V_MUST_DEF operands.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      get_stmt_operands (stmt);
      ann = stmt_ann (stmt);

      uses = USE_OPS (ann);
      for (i = 0; i < NUM_USES (uses); i++)
	rewrite_to_new_ssa_names_use (USE_OP_PTR (uses, i), map);

      defs = DEF_OPS (ann);
      for (i = 0; i < NUM_DEFS (defs); i++)
	rewrite_to_new_ssa_names_def (DEF_OP_PTR (defs, i), stmt, map);

      vuses = VUSE_OPS (ann);
      for (i = 0; i < NUM_VUSES (vuses); i++)
	rewrite_to_new_ssa_names_use (VUSE_OP_PTR (vuses, i), map);

      /* V_MAY_DEFs have both a use part and a result (def) part.  */
      v_may_defs = V_MAY_DEF_OPS (ann);
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
	{
	  rewrite_to_new_ssa_names_use
	    (V_MAY_DEF_OP_PTR (v_may_defs, i), map);
	  rewrite_to_new_ssa_names_def
	    (V_MAY_DEF_RESULT_PTR (v_may_defs, i), stmt, map);
	}

      /* Similarly, V_MUST_DEFs have a result and a killed operand.  */
      v_must_defs = V_MUST_DEF_OPS (ann);
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
	{
	  rewrite_to_new_ssa_names_def
	    (V_MUST_DEF_RESULT_PTR (v_must_defs, i), stmt, map);
	  rewrite_to_new_ssa_names_use
	    (V_MUST_DEF_KILL_PTR (v_must_defs, i), map);
	}
    }

  /* Finally rewrite the PHI arguments BB contributes to its
     successors.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
      {
	rewrite_to_new_ssa_names_use
	  (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), map);

	if (e->flags & EDGE_ABNORMAL)
	  {
	    tree op = PHI_ARG_DEF_FROM_EDGE (phi, e);
	    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (op) = 1;
	  }
      }
}
4558
4559 /* Rewrite the ssa names in N_REGION blocks REGION to the new ones as specified
4560 by the mapping MAP. */
4561
4562 void
4563 rewrite_to_new_ssa_names (basic_block *region, unsigned n_region, htab_t map)
4564 {
4565 unsigned r;
4566
4567 for (r = 0; r < n_region; r++)
4568 rewrite_to_new_ssa_names_bb (region[r], map);
4569 }
4570
4571 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4572 important exit edge EXIT. By important we mean that no SSA name defined
4573 inside region is live over the other exit edges of the region. All entry
4574 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
4575 to the duplicate of the region. SSA form, dominance and loop information
4576 is updated. The new basic blocks are stored to REGION_COPY in the same
4577 order as they had in REGION, provided that REGION_COPY is not NULL.
4578 The function returns false if it is unable to copy the region,
4579 true otherwise. */
4580
bool
tree_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i, n_doms, ver;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  bitmap definitions;
  tree phi;
  basic_block *doms;
  htab_t ssa_name_map = NULL;
  edge redirected;
  bitmap_iterator bi;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */

  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may be in the region only as its entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  loop->copy = loop;

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      loop->copy = loop->outer;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* Allocate the result array ourselves when the caller did not
     supply one; remember to free it before returning.  */
  if (!region_copy)
    {
      region_copy = xmalloc (sizeof (basic_block) * n_region);
      free_region_copy = true;
    }

  gcc_assert (!any_marked_for_rewrite_p ());

  /* Record blocks outside the region that are duplicated by something
     inside.  */
  doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop);
  /* DEFINITIONS collects the SSA names defined inside the region,
     marked during copy_bbs.  */
  definitions = marked_ssa_names ();

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, entry->dest->rbi->copy);
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the region, but
     was dominated by something inside needs recounting as well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  doms[n_doms++] = entry->dest->rbi->original;
  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
  free (doms);

  /* Add the other phi node arguments.  */
  add_phi_args_after_copy (region_copy, n_region);

  /* Add phi nodes for definitions at exit.  TODO -- once we have immediate
     uses, it should be possible to emit phi nodes just for definitions that
     are used outside region.  */
  EXECUTE_IF_SET_IN_BITMAP (definitions, 0, ver, bi)
    {
      tree name = ssa_name (ver);

      phi = create_phi_node (name, exit->dest);
      add_phi_arg (&phi, name, exit);
      add_phi_arg (&phi, name, exit_copy);

      SSA_NAME_DEF_STMT (name) = phi;
    }

  /* And create new definitions inside region and its copy.  TODO -- once we
     have immediate uses, it might be better to leave definitions in region
     unchanged, create new ssa names for phi nodes on exit, and rewrite
     the uses, to avoid changing the copied region.

     allocate_ssa_names is called twice on purpose: each call assigns
     a fresh set of replacement names, so the region and its copy get
     distinct definitions.  */
  allocate_ssa_names (definitions, &ssa_name_map);
  rewrite_to_new_ssa_names (region, n_region, ssa_name_map);
  allocate_ssa_names (definitions, &ssa_name_map);
  rewrite_to_new_ssa_names (region_copy, n_region, ssa_name_map);
  htab_delete (ssa_name_map);

  if (free_region_copy)
    free (region_copy);

  unmark_all_for_rewrite ();
  BITMAP_XFREE (definitions);

  return true;
}
4705
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  /* Print the function's signature: printable name followed by its
     comma-separated argument list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, arg, dump_flags);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  /* With TDF_RAW, emit the raw tree-node dump and return; none of the
     pretty-printed forms below apply.  */
  if (flags & TDF_RAW)
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->unexpanded_var_list)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (basic_block_info)
    {
      /* Make a CFG based dump: one dump_generic_bb per basic block.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      /* Separate the variable declarations printed above from the
	 block dumps with a blank line.  */
      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	dump_generic_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else
    {
      int indent;

      /* Make a tree based dump from DECL_SAVED_TREE.  */
      chain = DECL_SAVED_TREE (fn);

      if (TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The unexpanded-vars section above already opened a
		 brace; skip the topmost BIND_EXPR and indent its body
		 instead of printing it.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");
}
4802
4803
4804 /* Pretty print of the loops intermediate representation. */
4805 static void print_loop (FILE *, struct loop *, int);
4806 static void print_pred_bbs (FILE *, basic_block bb);
4807 static void print_succ_bbs (FILE *, basic_block bb);
4808
4809
/* Print on FILE the indexes of the predecessor basic blocks of BB.  */

static void
print_pred_bbs (FILE *file, basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* For a predecessor edge, the predecessor block is E->src.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    fprintf (file, "bb_%d", e->src->index);
}
4821
4822
4823 /* Print the successors indexes of edge E on FILE. */
4824
4825 static void
4826 print_succ_bbs (FILE *file, basic_block bb)
4827 {
4828 edge e;
4829 edge_iterator ei;
4830
4831 FOR_EACH_EDGE (e, ei, bb->succs)
4832 fprintf (file, "bb_%d", e->src->index);
4833 }
4834
4835
/* Pretty print LOOP on FILE, indented INDENT spaces.  Recursively
   prints inner loops (further indented) and sibling loops (same
   indentation).  */

static void
print_loop (FILE *file, struct loop *loop, int indent)
{
  char *s_indent;
  basic_block bb;

  /* NULL terminates both the inner-loop and sibling-loop chains.  */
  if (loop == NULL)
    return;

  /* Build the indentation string once for this nesting level.  */
  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print the loop's header.  */
  fprintf (file, "%sloop_%d\n", s_indent, loop->num);

  /* Print the loop's body: every basic block whose immediate
     loop_father is this loop (blocks of subloops are printed by the
     recursive call below).  */
  fprintf (file, "%s{\n", s_indent);
  FOR_EACH_BB (bb)
    if (bb->loop_father == loop)
      {
	/* Print the basic_block's header.  */
	fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
	print_pred_bbs (file, bb);
	fprintf (file, "}, succs = {");
	print_succ_bbs (file, bb);
	fprintf (file, "})\n");

	/* Print the basic_block's body.  */
	fprintf (file, "%s  {\n", s_indent);
	tree_dump_bb (bb, file, indent + 4);
	fprintf (file, "%s  }\n", s_indent);
      }

  /* Inner loops nest inside this loop's braces; siblings follow at
     the same indentation.  */
  print_loop (file, loop->inner, indent + 2);
  fprintf (file, "%s}\n", s_indent);
  print_loop (file, loop->next, indent);
}
4876
4877
4878 /* Follow a CFG edge from the entry point of the program, and on entry
4879 of a loop, pretty print the loop structure on FILE. */
4880
4881 void
4882 print_loop_ir (FILE *file)
4883 {
4884 basic_block bb;
4885
4886 bb = BASIC_BLOCK (0);
4887 if (bb && bb->loop_father)
4888 print_loop (file, bb->loop_father, 0);
4889 }
4890
4891
/* Debugging loops structure at tree level.  Convenience wrapper that
   dumps the loop structure of the current function to stderr; meant
   to be called from a debugger.  */

void
debug_loop_ir (void)
{
  print_loop_ir (stderr);
}
4899
4900
4901 /* Return true if BB ends with a call, possibly followed by some
4902 instructions that must stay with the call. Return false,
4903 otherwise. */
4904
4905 static bool
4906 tree_block_ends_with_call_p (basic_block bb)
4907 {
4908 block_stmt_iterator bsi = bsi_last (bb);
4909 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4910 }
4911
4912
4913 /* Return true if BB ends with a conditional branch. Return false,
4914 otherwise. */
4915
4916 static bool
4917 tree_block_ends_with_condjump_p (basic_block bb)
4918 {
4919 tree stmt = tsi_stmt (bsi_last (bb).tsi);
4920 return (TREE_CODE (stmt) == COND_EXPR);
4921 }
4922
4923
4924 /* Return true if we need to add fake edge to exit at statement T.
4925 Helper function for tree_flow_call_edges_add. */
4926
4927 static bool
4928 need_fake_edge_p (tree t)
4929 {
4930 tree call;
4931
4932 /* NORETURN and LONGJMP calls already have an edge to exit.
4933 CONST, PURE and ALWAYS_RETURN calls do not need one.
4934 We don't currently check for CONST and PURE here, although
4935 it would be a good idea, because those attributes are
4936 figured out from the RTL in mark_constant_function, and
4937 the counter incrementation code from -fprofile-arcs
4938 leads to different results from -fbranch-probabilities. */
4939 call = get_call_expr_in (t);
4940 if (call
4941 && !(call_expr_flags (call) &
4942 (ECF_NORETURN | ECF_LONGJMP | ECF_ALWAYS_RETURN)))
4943 return true;
4944
4945 if (TREE_CODE (t) == ASM_EXPR
4946 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4947 return true;
4948
4949 return false;
4950 }
4951
4952
4953 /* Add fake edges to the function exit for any non constant and non
4954 noreturn calls, volatile inline assembly in the bitmap of blocks
4955 specified by BLOCKS or to the whole CFG if BLOCKS is zero. Return
4956 the number of blocks that were split.
4957
4958 The goal is to expose cases in which entering a basic block does
4959 not imply that all subsequent instructions must be executed. */
4960
4961 static int
4962 tree_flow_call_edges_add (sbitmap blocks)
4963 {
4964 int i;
4965 int blocks_split = 0;
4966 int last_bb = last_basic_block;
4967 bool check_last_block = false;
4968
4969 if (n_basic_blocks == 0)
4970 return 0;
4971
4972 if (! blocks)
4973 check_last_block = true;
4974 else
4975 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4976
4977 /* In the last basic block, before epilogue generation, there will be
4978 a fallthru edge to EXIT. Special care is required if the last insn
4979 of the last basic block is a call because make_edge folds duplicate
4980 edges, which would result in the fallthru edge also being marked
4981 fake, which would result in the fallthru edge being removed by
4982 remove_fake_edges, which would result in an invalid CFG.
4983
4984 Moreover, we can't elide the outgoing fake edge, since the block
4985 profiler needs to take this into account in order to solve the minimal
4986 spanning tree in the case that the call doesn't return.
4987
4988 Handle this by adding a dummy instruction in a new last basic block. */
4989 if (check_last_block)
4990 {
4991 edge_iterator ei;
4992 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4993 block_stmt_iterator bsi = bsi_last (bb);
4994 tree t = NULL_TREE;
4995 if (!bsi_end_p (bsi))
4996 t = bsi_stmt (bsi);
4997
4998 if (need_fake_edge_p (t))
4999 {
5000 edge e;
5001
5002 FOR_EACH_EDGE (e, ei, bb->succs)
5003 if (e->dest == EXIT_BLOCK_PTR)
5004 {
5005 bsi_insert_on_edge (e, build_empty_stmt ());
5006 bsi_commit_edge_inserts ((int *)NULL);
5007 break;
5008 }
5009 }
5010 }
5011
5012 /* Now add fake edges to the function exit for any non constant
5013 calls since there is no way that we can determine if they will
5014 return or not... */
5015 for (i = 0; i < last_bb; i++)
5016 {
5017 basic_block bb = BASIC_BLOCK (i);
5018 block_stmt_iterator bsi;
5019 tree stmt, last_stmt;
5020
5021 if (!bb)
5022 continue;
5023
5024 if (blocks && !TEST_BIT (blocks, i))
5025 continue;
5026
5027 bsi = bsi_last (bb);
5028 if (!bsi_end_p (bsi))
5029 {
5030 last_stmt = bsi_stmt (bsi);
5031 do
5032 {
5033 stmt = bsi_stmt (bsi);
5034 if (need_fake_edge_p (stmt))
5035 {
5036 edge e;
5037 /* The handling above of the final block before the
5038 epilogue should be enough to verify that there is
5039 no edge to the exit block in CFG already.
5040 Calling make_edge in such case would cause us to
5041 mark that edge as fake and remove it later. */
5042 #ifdef ENABLE_CHECKING
5043 if (stmt == last_stmt)
5044 {
5045 edge_iterator ei;
5046 FOR_EACH_EDGE (e, ei, bb->succs)
5047 gcc_assert (e->dest != EXIT_BLOCK_PTR);
5048 }
5049 #endif
5050
5051 /* Note that the following may create a new basic block
5052 and renumber the existing basic blocks. */
5053 if (stmt != last_stmt)
5054 {
5055 e = split_block (bb, stmt);
5056 if (e)
5057 blocks_split++;
5058 }
5059 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
5060 }
5061 bsi_prev (&bsi);
5062 }
5063 while (!bsi_end_p (bsi));
5064 }
5065 }
5066
5067 if (blocks_split)
5068 verify_flow_info ();
5069
5070 return blocks_split;
5071 }
5072
/* Remove all outgoing EH edges from BB when its last statement cannot
   throw internally.  Return true if any edge was removed.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  /* If the last statement can still throw, the EH edges are live and
     there is nothing to purge.  */
  if (stmt && tree_can_throw_internal (stmt))
    return false;

  /* Walk the successor edges; only advance the iterator when the
     current edge is kept.  NOTE(review): this assumes ssa_remove_edge
     compacts the successor vector so the next edge moves into the
     current slot -- confirm against its definition.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  ssa_remove_edge (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not
     just immediate successors.  E.g. when bb1 is changed so that
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
     eh edges purged by this function in:
           0
	  / \
	 v   v
	 1-->2
	/ \  |
       v   v |
       3-->4 |
	\    v
	 --->5
	     |
	     -
     idom(bb5) must be recomputed.  For now just free the dominance
     info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}
5117
5118 bool
5119 tree_purge_all_dead_eh_edges (bitmap blocks)
5120 {
5121 bool changed = false;
5122 unsigned i;
5123 bitmap_iterator bi;
5124
5125 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5126 {
5127 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5128 }
5129
5130 return changed;
5131 }
5132
/* CFG hook implementations for trees; see the cfg_hooks struct for
   the meaning of each slot.  */

struct cfg_hooks tree_cfg_hooks = {
  "tree",
  tree_verify_flow_info,	/* verify_flow_info */
  tree_dump_bb,			/* dump_bb */
  create_bb,			/* create_basic_block */
  tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
  remove_bb,			/* delete_basic_block */
  tree_split_block,		/* split_block */
  tree_move_block_after,	/* move_block_after */
  tree_can_merge_blocks_p,	/* can_merge_blocks_p */
  tree_merge_blocks,		/* merge_blocks */
  tree_predict_edge,		/* predict_edge */
  tree_predicted_by_p,		/* predicted_by_p */
  tree_can_duplicate_bb_p,	/* can_duplicate_block_p */
  tree_duplicate_bb,		/* duplicate_block */
  tree_split_edge,		/* split_edge */
  tree_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  tree_block_ends_with_call_p,	/* block_ends_with_call_p */
  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  tree_flow_call_edges_add	/* flow_call_edges_add */
};
5156
5157
5158 /* Split all critical edges. */
5159
5160 static void
5161 split_critical_edges (void)
5162 {
5163 basic_block bb;
5164 edge e;
5165 edge_iterator ei;
5166
5167 FOR_ALL_BB (bb)
5168 {
5169 FOR_EACH_EDGE (e, ei, bb->succs)
5170 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5171 {
5172 split_edge (e);
5173 }
5174 }
5175 }
5176
/* Pass descriptor for the critical-edge splitting pass (see
   split_critical_edges above).  */

struct tree_opt_pass pass_split_crit_edges =
{
  "crited",			/* name */
  NULL,				/* gate */
  split_critical_edges,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_SPLIT_EDGES,		/* tv_id */
  PROP_cfg,			/* properties_required */
  PROP_no_crit_edges,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func,		/* todo_flags_finish */
  0				/* letter */
};
5193
5194 \f
5195 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5196 a temporary, make sure and register it to be renamed if necessary,
5197 and finally return the temporary. Put the statements to compute
5198 EXP before the current statement in BSI. */
5199
5200 tree
5201 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5202 {
5203 tree t, new_stmt, orig_stmt;
5204
5205 if (is_gimple_val (exp))
5206 return exp;
5207
5208 t = make_rename_temp (type, NULL);
5209 new_stmt = build (MODIFY_EXPR, type, t, exp);
5210
5211 orig_stmt = bsi_stmt (*bsi);
5212 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5213 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5214
5215 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5216
5217 return t;
5218 }
5219
5220 /* Build a ternary operation and gimplify it. Emit code before BSI.
5221 Return the gimple_val holding the result. */
5222
5223 tree
5224 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5225 tree type, tree a, tree b, tree c)
5226 {
5227 tree ret;
5228
5229 ret = fold (build3 (code, type, a, b, c));
5230 STRIP_NOPS (ret);
5231
5232 return gimplify_val (bsi, type, ret);
5233 }
5234
5235 /* Build a binary operation and gimplify it. Emit code before BSI.
5236 Return the gimple_val holding the result. */
5237
5238 tree
5239 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5240 tree type, tree a, tree b)
5241 {
5242 tree ret;
5243
5244 ret = fold (build2 (code, type, a, b));
5245 STRIP_NOPS (ret);
5246
5247 return gimplify_val (bsi, type, ret);
5248 }
5249
5250 /* Build a unary operation and gimplify it. Emit code before BSI.
5251 Return the gimple_val holding the result. */
5252
5253 tree
5254 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5255 tree a)
5256 {
5257 tree ret;
5258
5259 ret = fold (build1 (code, type, a));
5260 STRIP_NOPS (ret);
5261
5262 return gimplify_val (bsi, type, ret);
5263 }
5264
5265
5266 \f
/* Emit return warnings: -Wmissing-noreturn for functions with no path
   to EXIT, "noreturn function does return" for noreturn functions
   with a path to EXIT, and -Wreturn-type for value-returning
   functions that reach the end via a bare "return;".  */

static void
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  /* No predecessors of EXIT means no path returns; suggest the
     noreturn attribute unless the decl already has it
     (TREE_THIS_VOLATILE) or the frontend objects.  */
  if (warn_missing_noreturn
      && !TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
      && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
    warning ("%Jfunction might be possible candidate for "
	     "attribute %<noreturn%>",
	     cfun->decl);

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      /* Prefer pointing the warning at a RETURN_EXPR with a known
	 location; fall back to the end of the function below.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
	      && (locus = EXPR_LOCUS (last)) != NULL)
#endif
	    break;
	}
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", &location);
#else
      if (!locus)
	locus = &cfun->function_end_locus;
      warning ("%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  tree last = last_stmt (e->src);
	  /* A RETURN_EXPR with a NULL operand is a bare "return;".  */
	  if (TREE_CODE (last) == RETURN_EXPR
	      && TREE_OPERAND (last, 0) == NULL)
	    {
#ifdef USE_MAPPED_LOCATION
	      location = EXPR_LOCATION (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", &location);
#else
	      locus = EXPR_LOCUS (last);
	      if (!locus)
		locus = &cfun->function_end_locus;
	      warning ("%Hcontrol reaches end of non-void function", locus);
#endif
	      break;
	    }
	}
    }
}
5348
5349
5350 /* Given a basic block B which ends with a conditional and has
5351 precisely two successors, determine which of the edges is taken if
5352 the conditional is true and which is taken if the conditional is
5353 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5354
5355 void
5356 extract_true_false_edges_from_block (basic_block b,
5357 edge *true_edge,
5358 edge *false_edge)
5359 {
5360 edge e = EDGE_SUCC (b, 0);
5361
5362 if (e->flags & EDGE_TRUE_VALUE)
5363 {
5364 *true_edge = e;
5365 *false_edge = EDGE_SUCC (b, 1);
5366 }
5367 else
5368 {
5369 *false_edge = e;
5370 *true_edge = EDGE_SUCC (b, 1);
5371 }
5372 }
5373
/* Pass descriptor for the return-warning pass (see
   execute_warn_function_return above).  Unnamed, so it produces no
   dump of its own.  */

struct tree_opt_pass pass_warn_function_return =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
5390
5391 #include "gt-tree-cfg.h"