3034ba339e1e2600fbd2e3f6302e1142064ebde6
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "tree-ssa-propagate.h"
47 #include "value-prof.h"
48 #include "pointer-set.h"
49 #include "tree-inline.h"
50
51 /* This file contains functions for building the Control Flow Graph (CFG)
52 for a function tree. */
53
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Count of labels folded into the leading label of their block by
     cleanup_dead_labels and friends.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void make_blocks (tree);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (const_tree, const_tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
static inline void change_bb_for_stmt (tree t, basic_block bb);
static bool computed_goto_p (const_tree);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
/* Reset function FN's CFG to the minimal "empty" state: freshly
   allocated block and label maps containing only the fixed ENTRY and
   EXIT blocks, chained directly to one another.  Real blocks are
   inserted between them later by the CFG builder.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 basic_block_info_for_function (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
			 label_to_block_map_for_function (fn),
			 initial_cfg_capacity);

  /* Install the two fixed blocks at their reserved indices.  */
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
				ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
				EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  /* Link ENTRY directly to EXIT; the chain is grown as blocks are
     created.  */
  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}
147
148 void
149 init_empty_tree_cfg (void)
150 {
151 init_empty_tree_cfg_for_function (cfun);
152 }
153
154 /*---------------------------------------------------------------------------
155 Create basic blocks
156 ---------------------------------------------------------------------------*/
157
/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.

   The ordering of the steps below is deliberate: computed gotos must be
   factored before edges exist, dead labels must be purged before case
   grouping so that label comparisons see canonical labels, and edge
   creation consumes the grouped case vectors.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();
  /* A second label cleanup: edge creation removes simple GOTO_EXPRs,
     which may leave their target labels unreferenced.  */
  cleanup_dead_labels ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	tree_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
223
/* Gate-less executor for the CFG construction pass: build the CFG from
   the saved GENERIC/GIMPLE body of the current function.  Always
   returns 0 (no extra TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
  return 0;
}
230
/* Pass descriptor for CFG construction.  Requires lowered EH
   (PROP_gimple_leh) and provides PROP_cfg for downstream passes.  */

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg	/* todo_flags_finish */
 }
};
249
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.

   After this runs, each original computed goto becomes "gotovar = dest;
   goto factored_label;" and a single new block holds the one remaining
   computed goto "goto gotovar;".  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      if (bsi_end_p (bsi))
	continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  tree assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (! factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      block_stmt_iterator new_bsi = bsi_start (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label ();
	      factored_computed_goto_label
		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
				BSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
	      bsi_insert_after (&new_bsi, factored_computed_goto,
				BSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = build_gimple_modify_stmt (var,
						 GOTO_DESTINATION (last));
	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  GOTO_DESTINATION (last) = factored_label_decl;
	}
    }
}
325
326
/* Build a flowgraph for the statement_list STMT_LIST: walk the list,
   splitting it into basic blocks at statements that must start a block
   (labels, etc.) and after statements that must end one (control
   transfers).  Also records whether any computed goto was seen, for
   later factoring.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  /* Splitting is only needed once we are past the head of the
	     list; the very first block can take the list as-is.  */
	  if (!first_stmt_of_list)
	    stmt_list = tsi_split_statement_list_before (&i);
	  bb = create_basic_block (stmt_list, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      set_bb_for_stmt (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
372
373
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the statement list for the new block; E must be NULL
   (this hook never receives an edge in the tree CFG).  This is the
   create_basic_block hook for trees.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.tree = GGC_CNEW (struct tree_bb_info);
  set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed (by 25%, amortizing the
     reallocations).  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
411
412
413 /*---------------------------------------------------------------------------
414 Edge creation
415 ---------------------------------------------------------------------------*/
416
/* Fold COND_EXPR_COND of each COND_EXPR.  Conditions that fold to a
   constant are canonicalized to boolean_{false,true}_node so the CFG
   cleanup can remove the dead arm.  Overflow warnings are deferred
   around the fold and only emitted when the fold actually decided the
   branch.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);

      if (stmt
	  && TREE_CODE (stmt) == COND_EXPR)
	{
	  tree cond;
	  bool zerop, onep;

	  fold_defer_overflow_warnings ();
	  cond = fold (COND_EXPR_COND (stmt));
	  zerop = integer_zerop (cond);
	  onep = integer_onep (cond);
	  fold_undefer_overflow_warnings (zerop || onep,
					  stmt,
					  WARN_STRICT_OVERFLOW_CONDITIONAL);
	  if (zerop)
	    COND_EXPR_COND (stmt) = boolean_false_node;
	  else if (onep)
	    COND_EXPR_COND (stmt) = boolean_true_node;
	}
    }
}
448
/* Join all the blocks in the flowgraph: walk every block, dispatch on
   the code of its last statement, and create the outgoing edges that
   statement implies.  A running stack of OMP regions (CUR_REGION) is
   maintained so OMP_RETURN/OMP_CONTINUE can be wired back to their
   region entries.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum tree_code code = TREE_CODE (last);
	  switch (code)
	    {
	    case GOTO_EXPR:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RETURN_EXPR:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case COND_EXPR:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case SWITCH_EXPR:
	      make_switch_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RESX_EXPR:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case CALL_EXPR:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (tree_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(call_expr_flags (last) & ECF_NORETURN);
	      break;

	    case MODIFY_EXPR:
	      /* MODIFY_EXPR must have been lowered to GIMPLE_MODIFY_STMT
		 by this point.  */
	      gcc_unreachable ();

	    case GIMPLE_MODIFY_STMT:
	      if (is_ctrl_altering_stmt (last))
		{
		  /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
		     the CALL_EXPR may have an abnormal edge.  Search the RHS
		     for this case and create any required edges.  */
		  if (tree_can_make_abnormal_goto (last))
		    make_abnormal_goto_edges (bb, true);

		  make_eh_edges (last);
		}
	      fallthru = true;
	      break;

	    case OMP_PARALLEL:
	    case OMP_TASK:
	    case OMP_FOR:
	    case OMP_SINGLE:
	    case OMP_MASTER:
	    case OMP_ORDERED:
	    case OMP_CRITICAL:
	    case OMP_SECTION:
	      /* These open a new OMP region nested in the current one.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case OMP_SECTIONS_SWITCH:
	      /* Its edges are created when the enclosing OMP_SECTIONS
		 region is closed (see OMP_CONTINUE below).  */
	      fallthru = false;
	      break;


	    case OMP_ATOMIC_LOAD:
	    case OMP_ATOMIC_STORE:
	      fallthru = true;
	      break;


	    case OMP_RETURN:
	      /* In the case of an OMP_SECTION, the edge will go somewhere
		 other than the next block.  This will be created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case OMP_FOR:
		  /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
		     to prevent splitting them.  */
		  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
		  /* Make the loopback edge.  */
		  make_edge (bb, single_succ (cur_region->entry),
			     EDGE_ABNORMAL);

		  /* Create an edge from OMP_FOR to exit, which corresponds to
		     the case that the body of the loop is not executed at
		     all.  */
		  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
		  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
		  fallthru = false;
		  break;

		case OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  {
		    basic_block switch_bb = single_succ (cur_region->entry);

		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == OMP_SECTION);
			make_edge (switch_bb, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }

		    /* Make the loopback edge to the block with
		       OMP_SECTIONS_SWITCH.  */
		    make_edge (bb, switch_bb, 0);

		    /* Make the edge from the switch to exit.  */
		    make_edge (switch_bb, bb->next_bb, 0);
		    fallthru = false;
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
627
628
/* Create the edges for a COND_EXPR starting at block BB.
   At this point, both clauses must contain only simple gotos.  */

static void
make_cond_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (TREE_CODE (entry) == COND_EXPR);

  /* Entry basic blocks for each component.  */
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);

  /* BB has no successors yet, so the first make_edge cannot find a
     duplicate and always returns a new edge.  */
  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
  /* The second call CAN return NULL when both arms branch to the same
     block (make_edge refuses to duplicate an existing edge), hence the
     asymmetric check.  */
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));

  /* We do not need the gotos anymore.  */
  COND_EXPR_THEN (entry) = NULL_TREE;
  COND_EXPR_ELSE (entry) = NULL_TREE;
}
659
660
661 /* Called for each element in the hash table (P) as we delete the
662 edge to cases hash table.
663
664 Clear all the TREE_CHAINs to prevent problems with copying of
665 SWITCH_EXPRs and structure sharing rules, then free the hash table
666 element. */
667
668 static bool
669 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
670 void *data ATTRIBUTE_UNUSED)
671 {
672 tree t, next;
673
674 for (t = (tree) *value; t; t = next)
675 {
676 next = TREE_CHAIN (t);
677 TREE_CHAIN (t) = NULL;
678 }
679
680 *value = NULL;
681 return false;
682 }
683
/* Start recording information mapping edges to case labels.  Must be
   paired with end_recording_case_labels; nesting is not allowed (the
   assert enforces that no recording session is already active).  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}
692
693 /* Return nonzero if we are recording information for case labels. */
694
695 static bool
696 recording_case_labels_p (void)
697 {
698 return (edge_to_cases != NULL);
699 }
700
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  Clears the TREE_CHAIN
   links via edge_to_cases_cleanup before destroying the map.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
710
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  The map is populated lazily: the first query
   for a given SWITCH_EXPR T inserts chains for ALL of T's edges, so
   subsequent queries hit the fast path.  */

static tree
get_cases_for_edge (edge e, tree t)
{
  void **slot;
  size_t i, n;
  tree vec;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  vec = SWITCH_LABELS (t);
  n = TREE_VEC_LENGTH (vec);
  for (i = 0; i < n; i++)
    {
      tree elt = TREE_VEC_ELT (vec, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}
754
755 /* Create the edges for a SWITCH_EXPR starting at block BB.
756 At this point, the switch body has been lowered and the
757 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
758
759 static void
760 make_switch_expr_edges (basic_block bb)
761 {
762 tree entry = last_stmt (bb);
763 size_t i, n;
764 tree vec;
765
766 vec = SWITCH_LABELS (entry);
767 n = TREE_VEC_LENGTH (vec);
768
769 for (i = 0; i < n; ++i)
770 {
771 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
772 basic_block label_bb = label_to_block (lab);
773 make_edge (bb, label_bb, 0);
774 }
775 }
776
777
/* Return the basic block holding label DEST in function IFUN, or NULL
   if the label is not mapped.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi =
	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      tree stmt;

      /* Inserting the LABEL_EXPR assigns DEST a UID as a side effect.  */
      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
803
/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.
   One abnormal edge is made from BB to every block whose leading labels
   include a potential target: FORCED_LABEL (address-taken) labels for
   computed gotos, DECL_NONLOCAL labels for calls that may trigger a
   non-local goto.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (target_bb)
    for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree target = bsi_stmt (bsi);

	/* Labels only appear at the start of a block; stop scanning at
	   the first non-label statement.  */
	if (TREE_CODE (target) != LABEL_EXPR)
	  break;

	target = LABEL_EXPR_LABEL (target);

	/* Make an edge to every label block that has been marked as a
	   potential target for a computed goto or a non-local goto.  */
	if ((FORCED_LABEL (target) && !for_call)
	    || (DECL_NONLOCAL (target) && for_call))
	  {
	    make_edge (bb, target_bb, EDGE_ABNORMAL);
	    break;
	  }
      }
}
833
/* Create edges for a goto statement at block BB.  A simple goto to a
   known label becomes a single fallthru edge and the GOTO_EXPR itself
   is removed (the edge now encodes it); a computed goto gets abnormal
   edges to all address-taken labels and is kept.  */

static void
make_goto_expr_edges (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  tree goto_t = bsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = GOTO_DESTINATION (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
      e->goto_locus = EXPR_LOCATION (goto_t);
      /* The goto is now redundant with the edge; delete it.  */
      bsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
855
856
857 /*---------------------------------------------------------------------------
858 Flowgraph analysis
859 ---------------------------------------------------------------------------*/
860
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.
   Allocated and freed by cleanup_dead_labels; indexed by bb->index.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
878
/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.
   Redirects REGION's label to the leading label of its block and marks
   that leading label as used so it survives the purge.  */
static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
	 that have already been removed from the function body, so
	 there is no basic block for them.  */
      if (! bb)
	return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}
900
/* Given LABEL return the first label in the same basic block, marking
   it as used.  Helper for cleanup_dead_labels.  */
static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
918
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree label, stmt = bsi_stmt (i);

	  /* Labels appear only at the head of a block; stop at the
	     first non-label statement.  */
	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    tree true_branch, false_branch;

	    true_branch = COND_EXPR_THEN (stmt);
	    false_branch = COND_EXPR_ELSE (stmt);

	    if (true_branch)
	      GOTO_DESTINATION (true_branch)
		= main_block_label (GOTO_DESTINATION (true_branch));
	    if (false_branch)
	      GOTO_DESTINATION (false_branch)
		= main_block_label (GOTO_DESTINATION (false_branch));

	    break;
	  }

	case SWITCH_EXPR:
	  {
	    size_t i;
	    tree vec = SWITCH_LABELS (stmt);
	    size_t n = TREE_VEC_LENGTH (vec);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree elt = TREE_VEC_ELT (vec, i);
		tree label = main_block_label (CASE_LABEL (elt));
		CASE_LABEL (elt) = label;
	      }
	    break;
	  }

	/* We have to handle GOTO_EXPRs until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GOTO_EXPR:
	  if (! computed_goto_p (stmt))
	    {
	      GOTO_DESTINATION (stmt)
		= main_block_label (GOTO_DESTINATION (stmt));
	      break;
	    }
	  /* A computed goto has no redirectable label; fall through.  */

	default:
	  break;
	}
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  if (label == label_for_this_bb
	      || ! DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    bsi_next (&i);
	  else
	    bsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1061
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:

   Relies on the case vector being sorted by CASE_LOW (the gimplifier
   guarantees this) with the default case, if any, in the last slot.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree labels = SWITCH_LABELS (stmt);
	  int old_size = TREE_VEC_LENGTH (labels);
	  int i, j, new_size = old_size;
	  tree default_case = NULL_TREE;
	  tree default_label = NULL_TREE;

	  /* The default label is always the last case in a switch
	     statement after gimplification if it was not optimized
	     away.  */
	  if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
	      && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
	    {
	      default_case = TREE_VEC_ELT (labels, old_size - 1);
	      default_label = CASE_LABEL (default_case);
	      old_size--;
	    }

	  /* Look for possible opportunities to merge cases.  */
	  i = 0;
	  while (i < old_size)
	    {
	      tree base_case, base_label, base_high;
	      base_case = TREE_VEC_ELT (labels, i);

	      gcc_assert (base_case);
	      base_label = CASE_LABEL (base_case);

	      /* Discard cases that have the same destination as the
		 default case.  */
	      if (base_label == default_label)
		{
		  TREE_VEC_ELT (labels, i) = NULL_TREE;
		  i++;
		  new_size--;
		  continue;
		}

	      base_high = CASE_HIGH (base_case) ?
		CASE_HIGH (base_case) : CASE_LOW (base_case);
	      i++;
	      /* Try to merge case labels.  Break out when we reach the end
		 of the label vector or when we cannot merge the next case
		 label with the current one.  */
	      while (i < old_size)
		{
		  tree merge_case = TREE_VEC_ELT (labels, i);
		  tree merge_label = CASE_LABEL (merge_case);
		  /* T is one past the end of the current merged range.  */
		  tree t = int_const_binop (PLUS_EXPR, base_high,
					    integer_one_node, 1);

		  /* Merge the cases if they jump to the same place,
		     and their ranges are consecutive.  */
		  if (merge_label == base_label
		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
		    {
		      base_high = CASE_HIGH (merge_case) ?
			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
		      CASE_HIGH (base_case) = base_high;
		      TREE_VEC_ELT (labels, i) = NULL_TREE;
		      new_size--;
		      i++;
		    }
		  else
		    break;
		}
	    }

	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      /* Skip the slots emptied by merging/discarding above.  */
	      while (! TREE_VEC_ELT (labels, j))
		j++;
	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
	    }
	  TREE_VEC_LENGTH (labels) = new_size;
	}
    }
}
1156
1157 /* Checks whether we can merge block B into block A. */
1158
1159 static bool
1160 tree_can_merge_blocks_p (basic_block a, basic_block b)
1161 {
1162 const_tree stmt;
1163 block_stmt_iterator bsi;
1164 tree phi;
1165
1166 if (!single_succ_p (a))
1167 return false;
1168
1169 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1170 return false;
1171
1172 if (single_succ (a) != b)
1173 return false;
1174
1175 if (!single_pred_p (b))
1176 return false;
1177
1178 if (b == EXIT_BLOCK_PTR)
1179 return false;
1180
1181 /* If A ends by a statement causing exceptions or something similar, we
1182 cannot merge the blocks. */
1183 /* This CONST_CAST is okay because last_stmt doesn't modify its
1184 argument and the return value is assign to a const_tree. */
1185 stmt = last_stmt (CONST_CAST_BB (a));
1186 if (stmt && stmt_ends_bb_p (stmt))
1187 return false;
1188
1189 /* Do not allow a block with only a non-local label to be merged. */
1190 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1191 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1192 return false;
1193
1194 /* It must be possible to eliminate all phi nodes in B. If ssa form
1195 is not up-to-date, we cannot eliminate any phis; however, if only
1196 some symbols as whole are marked for renaming, this is not a problem,
1197 as phi nodes for those symbols are irrelevant in updating anyway. */
1198 phi = phi_nodes (b);
1199 if (phi)
1200 {
1201 if (name_mappings_registered_p ())
1202 return false;
1203
1204 for (; phi; phi = PHI_CHAIN (phi))
1205 if (!is_gimple_reg (PHI_RESULT (phi))
1206 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1207 return false;
1208 }
1209
1210 /* Do not remove user labels. */
1211 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1212 {
1213 stmt = bsi_stmt (bsi);
1214 if (TREE_CODE (stmt) != LABEL_EXPR)
1215 break;
1216 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1217 return false;
1218 }
1219
1220 /* Protect the loop latches. */
1221 if (current_loops
1222 && b->loop_father->latch == b)
1223 return false;
1224
1225 return true;
1226 }
1227
/* Replaces all uses of NAME by VAL.  NAME must be an SSA name; after
   the walk it must have no uses left (asserted below).  Non-PHI
   statements whose operands were rewritten are re-folded and have
   their EH information updated; loop structures that cached NAME are
   updated as well.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  tree stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* PHI nodes are not run through the operand scanner, so only
	 bracket real statements with push/pop_stmt_changes.  */
      if (TREE_CODE (stmt) != PHI_NODE)
	push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (TREE_CODE (stmt) == PHI_NODE)
	    {
	      e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_assert (!is_gimple_reg (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (TREE_CODE (stmt) != PHI_NODE)
	{
	  tree rhs;

	  fold_stmt_inplace (stmt);
	  /* Record that the statement's block changed so that cfg
	     cleanup revisits it.  */
	  if (cfgcleanup_altered_bbs)
	    bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);

	  /* FIXME.  This should go in pop_stmt_changes.  */
	  rhs = get_rhs (stmt);
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (rhs);

	  maybe_clean_or_replace_eh_stmt (stmt, stmt);

	  pop_stmt_changes (&stmt);
	}
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1294
/* Merge block B into block A.  The caller is expected to have checked
   legality with tree_can_merge_blocks_p.  PHI nodes in B are removed
   (either by propagating the single argument into the uses of the
   result, or by emitting an explicit copy at the end of A), labels in
   B are removed or moved, and B's statement list is spliced onto the
   end of A's.  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;
  tree phi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  bsi = bsi_last (a);
  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
    {
      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
      tree copy;
      bool may_replace_uses = may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && is_gimple_reg (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (is_gimple_reg (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = build_gimple_modify_stmt (def, use);
	  bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
	  SSA_NAME_DEF_STMT (def) = copy;
	  remove_phi_node (phi, NULL, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (!is_gimple_reg (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      tree stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);
	    }
	  else
	    replace_uses_by (def, use);
	  remove_phi_node (phi, NULL, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
	{
	  tree label = bsi_stmt (bsi);

	  bsi_remove (&bsi, false);
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
	    {
	      block_stmt_iterator dest_bsi = bsi_start (a);
	      bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
	    }
	}
      else
	{
	  change_bb_for_stmt (bsi_stmt (bsi), a);
	  bsi_next (&bsi);
	}
    }

  /* Merge the chains.  */
  last = tsi_last (bb_stmt_list (a));
  tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
  set_bb_stmt_list (b, NULL_TREE);

  /* Let cfg cleanup know that block A has changed.  */
  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1400
1401
1402 /* Return the one of two successors of BB that is not reachable by a
1403 reached by a complex edge, if there is one. Else, return BB. We use
1404 this in optimizations that use post-dominators for their heuristics,
1405 to catch the cases in C++ where function calls are involved. */
1406
1407 basic_block
1408 single_noncomplex_succ (basic_block bb)
1409 {
1410 edge e0, e1;
1411 if (EDGE_COUNT (bb->succs) != 2)
1412 return bb;
1413
1414 e0 = EDGE_SUCC (bb, 0);
1415 e1 = EDGE_SUCC (bb, 1);
1416 if (e0->flags & EDGE_COMPLEX)
1417 return e1->dest;
1418 if (e1->flags & EDGE_COMPLEX)
1419 return e0->dest;
1420
1421 return bb;
1422 }
1423
1424
/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */

/* State shared by the remove_useless_stmts_* walkers below.  */

struct rus_data
{
  tree *last_goto;	/* Address of the most recently walked GOTO_EXPR
			   whose destination is a LABEL_DECL; cleared
			   whenever an intervening statement makes that
			   goto non-removable.  */
  bool repeat;		/* Set when a transformation may have exposed
			   more opportunities; rerun the whole walk.  */
  bool may_throw;	/* The walked region contains a statement that
			   could throw.  */
  bool may_branch;	/* The walked region contains a goto or return.  */
  bool has_label;	/* The walked region contains a LABEL_EXPR.  */
};
1450
1451 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1452
/* If STMT (or, for container nodes, any statement inside it) carries a
   usable source location, emit a -Wunreachable-code warning there and
   return true.  Return false if no location to warn at was found.  */

static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
  if (EXPR_HAS_LOCATION (stmt))
    {
      location_t loc = EXPR_LOCATION (stmt);
      /* Only warn on real user locations, not line 0.  */
      if (LOCATION_LINE (loc) > 0)
	{
	  warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
	  return true;
	}
    }

  /* Recurse into containers looking for a statement to warn at; stop
     at the first warning emitted.  */
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
	    return true;
      }
      break;

    case COND_EXPR:
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
	return true;
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
	return true;
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
	return true;
      break;

    case CATCH_EXPR:
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
    case EH_FILTER_EXPR:
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
    case BIND_EXPR:
      /* NOTE(review): this recurses into BIND_EXPR_BLOCK rather than
	 BIND_EXPR_BODY; verify whether the statement body was meant,
	 since the BLOCK node does not contain the statements.  */
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));

    default:
      /* Not a live container.  */
      break;
    }

  return false;
}
1508
/* Simplify the COND_EXPR at *STMT_P: remove it entirely if both arms
   are empty, replace it by the live arm if the condition folds to a
   constant (provided the dead arm contains no label that could be
   jumped into), and clean up a few simple single-statement arm
   patterns.  Updates DATA->has_label, DATA->repeat and DATA->last_goto.  */

static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  /* Walk each arm separately, tracking whether it contains a label.  */
  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = fold (COND_EXPR_COND (*stmt_p));

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
	  && TREE_CODE (then_stmt) == GOTO_EXPR
	  && TREE_CODE (else_stmt) == GOTO_EXPR
	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
	{
	  *stmt_p = then_stmt;
	  data->repeat = true;
	}

      /* If the THEN/ELSE clause merely assigns a value to a variable or
	 parameter which is already known to contain that value, then
	 remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
	{
	  /* "if (v) ...; else v = 0;" -- the else arm is a no-op since
	     V is known to be zero there.  */
	  if (else_stmt
	      && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
	      && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
	      && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
	}
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
	{
	  /* "if (v == c) v = c;" (or the NE_EXPR mirror) -- the arm
	     re-assigns the value the condition already guarantees.  */
	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
		       ? then_stmt : else_stmt);
	  tree *location = (TREE_CODE (cond) == EQ_EXPR
			    ? &COND_EXPR_THEN (*stmt_p)
			    : &COND_EXPR_ELSE (*stmt_p));

	  if (stmt
	      && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
	      && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
	      && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
	    *location = alloc_stmt_list ();
	}
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
1608
1609
/* Simplify the TRY_FINALLY_EXPR at *STMT_P: drop the wrapper when the
   body is empty, when the FINALLY handler is empty, or when the body
   can neither branch nor throw (in which case body and handler can
   simply run in sequence).  */

static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}
1659
1660
/* Simplify the TRY_CATCH_EXPR at *STMT_P: drop the wrapper entirely
   when the body cannot throw, and otherwise walk the handler, which
   may establish that no exception propagates past this point (e.g. a
   catch-all or a must-not-throw filter).  */

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  stmt = tsi_stmt (i);
	  /* If we catch all exceptions, then the body does not
	     propagate exceptions past this point.  */
	  if (CATCH_TYPES (stmt) == NULL)
	    this_may_throw = false;
	  data->last_goto = NULL;
	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
	}
      break;

    case EH_FILTER_EXPR:
      /* A must-not-throw filter, or a filter accepting no types,
	 stops propagation.  */
      if (EH_FILTER_MUST_NOT_THROW (stmt))
	this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
	this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
	 the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
	{
	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
	  data->repeat = true;
	}
      break;
    }
  data->may_throw |= this_may_throw;
}
1734
1735
1736 static void
1737 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1738 {
1739 tree block;
1740
1741 /* First remove anything underneath the BIND_EXPR. */
1742 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1743
1744 /* If the BIND_EXPR has no variables, then we can pull everything
1745 up one level and remove the BIND_EXPR, unless this is the toplevel
1746 BIND_EXPR for the current function or an inlined function.
1747
1748 When this situation occurs we will want to apply this
1749 optimization again. */
1750 block = BIND_EXPR_BLOCK (*stmt_p);
1751 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1752 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1753 && (! block
1754 || ! BLOCK_ABSTRACT_ORIGIN (block)
1755 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1756 != FUNCTION_DECL)))
1757 {
1758 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1759 data->repeat = true;
1760 }
1761 }
1762
1763
1764 static void
1765 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1766 {
1767 tree dest = GOTO_DESTINATION (*stmt_p);
1768
1769 data->may_branch = true;
1770 data->last_goto = NULL;
1771
1772 /* Record the last goto expr, so that we can delete it if unnecessary. */
1773 if (TREE_CODE (dest) == LABEL_DECL)
1774 data->last_goto = stmt_p;
1775 }
1776
1777
1778 static void
1779 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1780 {
1781 tree label = LABEL_EXPR_LABEL (*stmt_p);
1782
1783 data->has_label = true;
1784
1785 /* We do want to jump across non-local label receiver code. */
1786 if (DECL_NONLOCAL (label))
1787 data->last_goto = NULL;
1788
1789 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1790 {
1791 *data->last_goto = build_empty_stmt ();
1792 data->repeat = true;
1793 }
1794
1795 /* ??? Add something here to delete unused labels. */
1796 }
1797
1798
1799 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1800 decl. This allows us to eliminate redundant or useless
1801 calls to "const" functions.
1802
1803 Gimplifier already does the same operation, but we may notice functions
1804 being const and pure once their calls has been gimplified, so we need
1805 to update the flag. */
1806
1807 static void
1808 update_call_expr_flags (tree call)
1809 {
1810 tree decl = get_callee_fndecl (call);
1811 int flags;
1812 if (!decl)
1813 return;
1814 flags = call_expr_flags (call);
1815 if (flags & (ECF_CONST | ECF_PURE) && !(flags & ECF_LOOPING_CONST_OR_PURE))
1816 TREE_SIDE_EFFECTS (call) = 0;
1817 if (TREE_NOTHROW (decl))
1818 TREE_NOTHROW (call) = 1;
1819 }
1820
1821
1822 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1823
1824 void
1825 notice_special_calls (tree t)
1826 {
1827 int flags = call_expr_flags (t);
1828
1829 if (flags & ECF_MAY_BE_ALLOCA)
1830 cfun->calls_alloca = true;
1831 if (flags & ECF_RETURNS_TWICE)
1832 cfun->calls_setjmp = true;
1833 }
1834
1835
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags after calls have been eliminated.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
1845
1846
/* Central dispatcher of the useless-statement walk: process the
   statement *TP, recursing into container nodes, and update DATA
   accordingly.  */

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case MODIFY_EXPR:
      /* Assignments here are GIMPLE_MODIFY_STMT; a plain MODIFY_EXPR
	 must not occur.  */
      gcc_unreachable ();

    case GIMPLE_MODIFY_STMT:
      data->last_goto = NULL;
      fold_stmt (tp);
      /* An embedded call on the RHS still needs its flags noticed.  */
      op = get_call_expr_in (t);
      if (op)
	{
	  update_call_expr_flags (op);
	  notice_special_calls (op);
	}
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i = tsi_start (t);
	while (!tsi_end_p (i))
	  {
	    t = tsi_stmt (i);
	    /* Drop empty statements outright.  */
	    if (IS_EMPTY_STMT (t))
	      {
		tsi_delink (&i);
		continue;
	      }

	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

	    /* If the walk replaced the statement by a nested
	       STATEMENT_LIST, splice its contents in place.  */
	    t = tsi_stmt (i);
	    if (TREE_CODE (t) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, t, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
      /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
	 as useless.  */
      remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
      data->last_goto = NULL;
      break;

    case OMP_SECTIONS:
    case OMP_SINGLE:
    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_ORDERED:
    case OMP_CRITICAL:
      remove_useless_stmts_1 (&OMP_BODY (*tp), data);
      data->last_goto = NULL;
      break;

    case OMP_FOR:
      remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
      data->last_goto = NULL;
      if (OMP_FOR_PRE_BODY (*tp))
	{
	  remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
	  data->last_goto = NULL;
	}
      break;

    default:
      /* An arbitrary statement invalidates the recorded goto.  */
      data->last_goto = NULL;
      break;
    }
}
1972
1973 static unsigned int
1974 remove_useless_stmts (void)
1975 {
1976 struct rus_data data;
1977
1978 clear_special_calls ();
1979
1980 do
1981 {
1982 memset (&data, 0, sizeof (data));
1983 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1984 }
1985 while (data.repeat);
1986 return 0;
1987 }
1988
1989
/* Pass descriptor for remove_useless_stmts; requires gimplified input
   (PROP_gimple_any) and dumps the function afterwards.  */

struct gimple_opt_pass pass_remove_useless_stmts =
{
 {
  GIMPLE_PASS,
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
2008
2009 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2010
2011 static void
2012 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2013 {
2014 tree phi;
2015
2016 /* Since this block is no longer reachable, we can just delete all
2017 of its PHI nodes. */
2018 phi = phi_nodes (bb);
2019 while (phi)
2020 {
2021 tree next = PHI_CHAIN (phi);
2022 remove_phi_node (phi, NULL_TREE, true);
2023 phi = next;
2024 }
2025
2026 /* Remove edges to BB's successors. */
2027 while (EDGE_COUNT (bb->succs) > 0)
2028 remove_edge (EDGE_SUCC (bb, 0));
2029 }
2030
2031
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
  /* Location of the first removed user statement, for the
     -Wunreachable-code warning at the end.  */
  source_location loc = UNKNOWN_LOCATION;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_stmt_list (bb) != NULL_TREE)
    {
      for (i = bsi_start (bb); !bsi_end_p (i);)
	{
	  tree stmt = bsi_stmt (i);
	  if (TREE_CODE (stmt) == LABEL_EXPR
	      && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
		  || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
	    {
	      basic_block new_bb;
	      block_stmt_iterator new_bsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
		{
		  DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
		  FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
		}

	      /* Preserve the label by moving it to the preceding block.  */
	      new_bb = bb->prev_bb;
	      new_bsi = bsi_start (new_bb);
	      bsi_remove (&i, false);
	      bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      bsi_remove (&i, true);
	    }

	  /* Don't warn for removed gotos.  Gotos are often removed due to
	     jump threading, thus resulting in bogus warnings.  Not great,
	     since this way we lose warnings for gotos in the original
	     program that are indeed unreachable.  */
	  if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
	    {
	      /* NOTE(review): this inner test repeats the one in the
		 enclosing condition and can never be false.  */
	      if (EXPR_HAS_LOCATION (stmt))
		loc = EXPR_LOCATION (stmt);
	    }
	}
    }

  /* If requested, give a warning that the block is unreachable.  Since
     LOC is only assigned when it is still UNKNOWN_LOCATION, it holds
     the location of the first statement (of those removed above) that
     had one.  NOTE(review): the loop above iterates forward from
     bsi_start, so the old "we walk statements backwards" wording
     looked stale and was dropped.  */
  if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.tree = NULL;
}
2122
2123
2124 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2125 predicate VAL, return the edge that will be taken out of the block.
2126 If VAL does not match a unique edge, NULL is returned. */
2127
2128 edge
2129 find_taken_edge (basic_block bb, tree val)
2130 {
2131 tree stmt;
2132
2133 stmt = last_stmt (bb);
2134
2135 gcc_assert (stmt);
2136 gcc_assert (is_ctrl_stmt (stmt));
2137 gcc_assert (val);
2138
2139 if (! is_gimple_min_invariant (val))
2140 return NULL;
2141
2142 if (TREE_CODE (stmt) == COND_EXPR)
2143 return find_taken_edge_cond_expr (bb, val);
2144
2145 if (TREE_CODE (stmt) == SWITCH_EXPR)
2146 return find_taken_edge_switch_expr (bb, val);
2147
2148 if (computed_goto_p (stmt))
2149 {
2150 /* Only optimize if the argument is a label, if the argument is
2151 not a label then we can not construct a proper CFG.
2152
2153 It may be the case that we only need to allow the LABEL_REF to
2154 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2155 appear inside a LABEL_EXPR just to be safe. */
2156 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2157 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2158 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2159 return NULL;
2160 }
2161
2162 gcc_unreachable ();
2163 }
2164
2165 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2166 statement, determine which of the outgoing edges will be taken out of the
2167 block. Return NULL if either edge may be taken. */
2168
2169 static edge
2170 find_taken_edge_computed_goto (basic_block bb, tree val)
2171 {
2172 basic_block dest;
2173 edge e = NULL;
2174
2175 dest = label_to_block (val);
2176 if (dest)
2177 {
2178 e = find_edge (bb, dest);
2179 gcc_assert (e != NULL);
2180 }
2181
2182 return e;
2183 }
2184
2185 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2186 statement, determine which of the two edges will be taken out of the
2187 block. Return NULL if either edge may be taken. */
2188
2189 static edge
2190 find_taken_edge_cond_expr (basic_block bb, tree val)
2191 {
2192 edge true_edge, false_edge;
2193
2194 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2195
2196 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2197 return (integer_zerop (val) ? false_edge : true_edge);
2198 }
2199
2200 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2201 statement, determine which edge will be taken out of the block. Return
2202 NULL if any edge may be taken. */
2203
2204 static edge
2205 find_taken_edge_switch_expr (basic_block bb, tree val)
2206 {
2207 tree switch_expr, taken_case;
2208 basic_block dest_bb;
2209 edge e;
2210
2211 switch_expr = last_stmt (bb);
2212 taken_case = find_case_label_for_value (switch_expr, val);
2213 dest_bb = label_to_block (CASE_LABEL (taken_case));
2214
2215 e = find_edge (bb, dest_bb);
2216 gcc_assert (e);
2217 return e;
2218 }
2219
2220
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  /* The default label is stored as the last element of the vector.  */
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  /* Binary search over the sorted non-default labels [0, n - 2].
     NOTE: LOW is a size_t initialized to -1, i.e. SIZE_MAX; the
     condition HIGH - LOW > 1 deliberately relies on unsigned modular
     arithmetic so that the initial "distance" is HIGH + 1.  The
     invariant is that any matching label has index in (LOW, HIGH].  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      /* Midpoint; also computed modulo SIZE_MAX + 1 on the first
	 iteration, which still yields an index within range.  */
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  /* No label matched VAL; the default case is taken.  */
  return default_case;
}
2262
2263
2264
2265
2266 /*---------------------------------------------------------------------------
2267 Debugging functions
2268 ---------------------------------------------------------------------------*/
2269
2270 /* Dump tree-specific information of block BB to file OUTF. */
2271
2272 void
2273 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2274 {
2275 dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
2276 }
2277
2278
/* Dump a basic block on stderr.  Convenience wrapper intended for use
   from a debugger.  */

void
debug_tree_bb (basic_block bb)
{
  dump_bb (bb, stderr, 0);
}
2286
2287
2288 /* Dump basic block with index N on stderr. */
2289
2290 basic_block
2291 debug_tree_bb_n (int n)
2292 {
2293 debug_tree_bb (BASIC_BLOCK (n));
2294 return BASIC_BLOCK (n);
2295 }
2296
2297
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  Convenience wrapper around
   dump_tree_cfg for use from a debugger.  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
2308
2309
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS additionally emits a CFG summary and a brief
   block/edge listing; TDF_STATS emits memory statistics.  */

void
dump_tree_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body itself, with block boundaries shown.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2337
2338
/* Dump CFG statistics on FILE: a table of basic block and edge counts
   with their approximate memory footprint, followed by the label
   coalescing counters.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels, preserved across calls so the
     report can show the maximum seen so far in this run.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* SCALE/LABEL are defined elsewhere in this file; presumably they
     scale SIZE to a human unit and pick the matching suffix character
     for the %lu%c format pairs below -- confirm against their
     definitions.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Count edges by summing successor edges of every block; each edge
     appears exactly once as a successor.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2389
2390
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable (handy from a debugger).  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2399
2400
/* Dump the flowgraph to a .vcg FILE (GDL/VCG graph format).  Emits one
   node per basic block plus synthetic ENTRY/EXIT nodes, and one edge
   per CFG edge; fake edges are drawn dotted with low priority.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  Edges out of the entry block first, since
     ENTRY has no node of its own in the per-block loop below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Label each node with its first and last statement's tree code
	 and source line, so the graph is readable by humans.  */
      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2484
2485
2486
2487 /*---------------------------------------------------------------------------
2488 Miscellaneous helpers
2489 ---------------------------------------------------------------------------*/
2490
2491 /* Return true if T represents a stmt that always transfers control. */
2492
2493 bool
2494 is_ctrl_stmt (const_tree t)
2495 {
2496 return (TREE_CODE (t) == COND_EXPR
2497 || TREE_CODE (t) == SWITCH_EXPR
2498 || TREE_CODE (t) == GOTO_EXPR
2499 || TREE_CODE (t) == RETURN_EXPR
2500 || TREE_CODE (t) == RESX_EXPR);
2501 }
2502
2503
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (const_tree t)
{
  const_tree call;

  gcc_assert (t);
  /* Extract the CALL_EXPR contained in T, if any (T may be the call
     itself or an assignment wrapping it).  */
  call = get_call_expr_in (CONST_CAST_TREE (t));
  if (call)
    {
      /* A non-pure/const CALL_EXPR alters flow control if the current
	 function has nonlocal labels.  */
      if (TREE_SIDE_EFFECTS (call) && cfun->has_nonlocal_label)
	return true;

      /* A CALL_EXPR also alters control flow if it does not return.  */
      if (call_expr_flags (call) & ECF_NORETURN)
	return true;
    }

  /* OpenMP directives alter control flow.  */
  if (OMP_DIRECTIVE_P (t))
    return true;

  /* If a statement can throw, it alters control flow.  */
  return tree_can_throw_internal (t);
}
2533
2534
2535 /* Return true if T is a computed goto. */
2536
2537 static bool
2538 computed_goto_p (const_tree t)
2539 {
2540 return (TREE_CODE (t) == GOTO_EXPR
2541 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2542 }
2543
2544
2545 /* Return true if T is a simple local goto. */
2546
2547 bool
2548 simple_goto_p (const_tree t)
2549 {
2550 return (TREE_CODE (t) == GOTO_EXPR
2551 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2552 }
2553
2554
/* Return true if T can make an abnormal transfer of control flow.
   Transfers of control flow associated with EH are excluded.  */

bool
tree_can_make_abnormal_goto (const_tree t)
{
  if (computed_goto_p (t))
    return true;
  /* Strip down to a possible CALL_EXPR: look through an assignment's
     RHS and then through a WITH_SIZE_EXPR wrapper (order matters).  */
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    t = GIMPLE_STMT_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  /* A call with side effects may return via a nonlocal goto when the
     current function has nonlocal labels.  */
  if (TREE_CODE (t) == CALL_EXPR)
    return TREE_SIDE_EFFECTS (t) && cfun->has_nonlocal_label;
  return false;
}
2571
2572
2573 /* Return true if T should start a new basic block. PREV_T is the
2574 statement preceding T. It is used when T is a label or a case label.
2575 Labels should only start a new basic block if their previous statement
2576 wasn't a label. Otherwise, sequence of labels would generate
2577 unnecessary basic blocks that only contain a single label. */
2578
2579 static inline bool
2580 stmt_starts_bb_p (const_tree t, const_tree prev_t)
2581 {
2582 if (t == NULL_TREE)
2583 return false;
2584
2585 /* LABEL_EXPRs start a new basic block only if the preceding
2586 statement wasn't a label of the same type. This prevents the
2587 creation of consecutive blocks that have nothing but a single
2588 label. */
2589 if (TREE_CODE (t) == LABEL_EXPR)
2590 {
2591 /* Nonlocal and computed GOTO targets always start a new block. */
2592 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2593 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2594 return true;
2595
2596 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2597 {
2598 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2599 return true;
2600
2601 cfg_stats.num_merged_labels++;
2602 return false;
2603 }
2604 else
2605 return true;
2606 }
2607
2608 return false;
2609 }
2610
2611
2612 /* Return true if T should end a basic block. */
2613
2614 bool
2615 stmt_ends_bb_p (const_tree t)
2616 {
2617 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2618 }
2619
/* Remove block annotations and other datastructures.  Frees the
   per-statement annotation of every statement in the function and
   drops the label-to-block map (the GC reclaims it).  */

void
delete_tree_cfg_annotations (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree stmt = bsi_stmt (bsi);
	/* Explicitly release the GC-allocated annotation and clear the
	   pointer so nothing dangles.  */
	ggc_free (stmt->base.ann);
	stmt->base.ann = NULL;
      }
  label_to_block_map = NULL;
}
2638
2639
2640 /* Return the first statement in basic block BB. */
2641
2642 tree
2643 first_stmt (basic_block bb)
2644 {
2645 block_stmt_iterator i = bsi_start (bb);
2646 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2647 }
2648
2649 /* Return the last statement in basic block BB. */
2650
2651 tree
2652 last_stmt (basic_block bb)
2653 {
2654 block_stmt_iterator b = bsi_last (bb);
2655 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2656 }
2657
2658 /* Return the last statement of an otherwise empty block. Return NULL
2659 if the block is totally empty, or if it contains more than one
2660 statement. */
2661
2662 tree
2663 last_and_only_stmt (basic_block bb)
2664 {
2665 block_stmt_iterator i = bsi_last (bb);
2666 tree last, prev;
2667
2668 if (bsi_end_p (i))
2669 return NULL_TREE;
2670
2671 last = bsi_stmt (i);
2672 bsi_prev (&i);
2673 if (bsi_end_p (i))
2674 return last;
2675
2676 /* Empty statements should no longer appear in the instruction stream.
2677 Everything that might have appeared before should be deleted by
2678 remove_useless_stmts, and the optimizers should just bsi_remove
2679 instead of smashing with build_empty_stmt.
2680
2681 Thus the only thing that should appear here in a block containing
2682 one executable statement is a label. */
2683 prev = bsi_stmt (i);
2684 if (TREE_CODE (prev) == LABEL_EXPR)
2685 return last;
2686 else
2687 return NULL_TREE;
2688 }
2689
2690
/* Mark BB as the basic block holding statement T.  Handles PHI nodes,
   whole statement lists (recursively), and plain statements; for
   LABEL_EXPRs it also keeps the label-to-block map in sync, assigning
   a fresh LABEL_DECL_UID on first sight.  BB may be NULL to detach a
   statement from its block.  */

void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      /* Apply to every statement in the list.  */
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
	 so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
	{
	  int uid;

	  t = LABEL_EXPR_LABEL (t);
	  uid = LABEL_DECL_UID (t);
	  if (uid == -1)
	    {
	      /* First time we see this label: assign it a UID and make
		 sure the map is large enough to hold it, growing by
		 roughly 1.5x to amortize reallocations.  */
	      unsigned old_len = VEC_length (basic_block, label_to_block_map);
	      LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
	      if (old_len <= (unsigned) uid)
		{
		  unsigned new_len = 3 * uid / 2;

		  VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
					 new_len);
		}
	    }
	  else
	    /* We're moving an existing label.  Make sure that we've
	       removed it from the old block.  */
	    gcc_assert (!bb
			|| !VEC_index (basic_block, label_to_block_map, uid));
	  VEC_replace (basic_block, label_to_block_map, uid, bb);
	}
    }
}
2738
/* Faster version of set_bb_for_stmt that assume that statement is being moved
   from one basic block to another.
   For BB splitting we can run into quadratic case, so performance is quite
   important and knowing that the tables are big enough, change_bb_for_stmt
   can inline as leaf function.
   Unlike set_bb_for_stmt, this neither grows the label map nor asserts
   the label was detached first -- callers must guarantee both.  */
static inline void
change_bb_for_stmt (tree t, basic_block bb)
{
  get_stmt_ann (t)->bb = bb;
  /* Keep the label-to-block map in sync for labels.  */
  if (TREE_CODE (t) == LABEL_EXPR)
    VEC_replace (basic_block, label_to_block_map,
		 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
}
2752
2753 /* Finds iterator for STMT. */
2754
2755 extern block_stmt_iterator
2756 bsi_for_stmt (tree stmt)
2757 {
2758 block_stmt_iterator bsi;
2759
2760 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2761 if (bsi_stmt (bsi) == stmt)
2762 return bsi;
2763
2764 gcc_unreachable ();
2765 }
2766
2767 /* Mark statement T as modified, and update it. */
2768 static inline void
2769 update_modified_stmts (tree t)
2770 {
2771 if (!ssa_operands_active ())
2772 return;
2773 if (TREE_CODE (t) == STATEMENT_LIST)
2774 {
2775 tree_stmt_iterator i;
2776 tree stmt;
2777 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2778 {
2779 stmt = tsi_stmt (i);
2780 update_stmt_if_modified (stmt);
2781 }
2782 }
2783 else
2784 update_stmt_if_modified (t);
2785 }
2786
/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).
   T is attached to I's block and its SSA operands refreshed before
   linking, so the statement is fully up to date once in the stream.  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_before (&i->tsi, t, m);
}
2798
2799
/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).
   Mirror image of bsi_insert_before.  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_after (&i->tsi, t, m);
}
2811
2812
/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.

   When REMOVE_EH_INFO is true we remove the statement pointed to by
   iterator I from the EH tables.  Otherwise we do not modify the EH
   tables.

   Generally, REMOVE_EH_INFO should be true when the statement is going to
   be removed from the IL and not reinserted elsewhere.  */

void
bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
{
  tree t = bsi_stmt (*i);
  /* Detach from the block, break immediate-use links, then unlink from
     the statement chain -- this order keeps the IL consistent.  */
  set_bb_for_stmt (t, NULL);
  delink_stmt_imm_use (t);
  tsi_delink (&i->tsi);
  /* Flag the statement so stale operand caches get rebuilt if it is
     ever reinserted.  */
  mark_stmt_modified (t);
  if (remove_eh_info)
    {
      remove_stmt_from_eh_region (t);
      gimple_remove_stmt_histograms (cfun, t);
    }
}
2837
2838
/* Move the statement at FROM so it comes right after the statement at TO.
   EH info is preserved (bsi_remove is told not to drop it).  */

void
bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from, false);
  /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
     move statements to an empty block.  */
  bsi_insert_after (to, stmt, BSI_NEW_STMT);
}
2850
2851
/* Move the statement at FROM so it comes right before the statement at TO.
   EH info is preserved (bsi_remove is told not to drop it).  */

void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  bsi_remove (from, false);
  /* For consistency with bsi_move_after, it might be better to have
     BSI_NEW_STMT here; however, that breaks several places that expect
     that TO does not change.  */
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}
2864
2865
2866 /* Move the statement at FROM to the end of basic block BB. */
2867
2868 void
2869 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2870 {
2871 block_stmt_iterator last = bsi_last (bb);
2872
2873 /* Have to check bsi_end_p because it could be an empty block. */
2874 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2875 bsi_move_before (from, &last);
2876 else
2877 bsi_move_after (from, &last);
2878 }
2879
2880
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If UPDATE_EH_INFO is true, the exception handling
   information of the original statement is moved to the new statement.
   STMT inherits the original's source location; value-profile
   histograms are transferred as well.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  /* Replacing a statement with itself is a no-op.  */
  if (stmt == orig_stmt)
    return;
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (update_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
	{
	  remove_stmt_from_eh_region (orig_stmt);
	  add_stmt_to_eh_region (stmt, eh_region);
	}
    }

  /* Move profiling histograms over, unlink the old statement's uses,
     then splice STMT into place and refresh its operands.  */
  gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
  gimple_remove_stmt_histograms (cfun, orig_stmt);
  delink_stmt_imm_use (orig_stmt);
  *bsi_stmt_ptr (*bsi) = stmt;
  mark_stmt_modified (stmt);
  update_modified_stmts (stmt);
}
2915
2916
/* Insert the statement pointed-to by BSI into edge E.  Every attempt
   is made to place the statement in an existing basic block, but
   sometimes that isn't possible.  When it isn't possible, the edge is
   split and the statement is added to the new block.

   In all cases, the returned *BSI points to the correct location.  The
   return value is true if insertion should be done after the location,
   or false if it should be done before the location.  If new basic block
   has to be created, it is stored in *NEW_BB.  */

static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
			   basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (single_pred_p (dest)
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
	return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
	{
	  bsi_next (bsi);
	  if (bsi_end_p (*bsi))
	    break;
	  tmp = bsi_stmt (*bsi);
	}

      /* If only labels remain, insert after the last one; otherwise
	 insert before the first non-label.  */
      if (bsi_end_p (*bsi))
	{
	  *bsi = bsi_last (dest);
	  return true;
	}
      else
	return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && single_succ_p (src)
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
	return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
	return true;

      /* Insert code just before returning the value.  We may need to decompose
	 the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
	{
	  tree op = TREE_OPERAND (tmp, 0);
	  if (op && !is_gimple_val (op))
	    {
	      /* Split "return <expr>" into the assignment followed by a
		 return of the assigned variable.  */
	      gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
	      TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
	    }
	  bsi_prev (bsi);
	  return true;
	}
    }

  /* Otherwise, create a new basic block, and split this edge.  The new
     block has a single predecessor edge, so the retry is guaranteed to
     take the first branch above.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = single_pred_edge (dest);
  goto restart;
}
3010
3011
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.  The entry edge is handled
   explicitly because FOR_EACH_BB does not visit the entry block.  */

void
bsi_commit_edge_inserts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_one_edge_insert (e, NULL);
}
3028
3029
3030 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3031 to this block, otherwise set it to NULL. */
3032
3033 void
3034 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3035 {
3036 if (new_bb)
3037 *new_bb = NULL;
3038 if (PENDING_STMT (e))
3039 {
3040 block_stmt_iterator bsi;
3041 tree stmt = PENDING_STMT (e);
3042
3043 PENDING_STMT (e) = NULL_TREE;
3044
3045 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3046 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3047 else
3048 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3049 }
3050 }
3051
3052
/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.
   Multiple statements queued on the same edge accumulate in order.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
3061
3062 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3063 block has to be created, it is returned. */
3064
3065 basic_block
3066 bsi_insert_on_edge_immediate (edge e, tree stmt)
3067 {
3068 block_stmt_iterator bsi;
3069 basic_block new_bb = NULL;
3070
3071 gcc_assert (!PENDING_STMT (e));
3072
3073 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3077
3078 return new_bb;
3079 }
3080
3081 /*---------------------------------------------------------------------------
3082 Tree specific functions for CFG manipulation
3083 ---------------------------------------------------------------------------*/
3084
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.
   The arguments were saved in the edge-var-map when OLD_EDGE was
   redirected; walk that map and the destination's PHI chain in
   lockstep, re-adding each argument on NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  tree phi;
  edge_var_map_vector v;
  edge_var_map *vm;
  int i;

  /* Nothing was queued for OLD_EDGE.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* The map entries and the PHI chain are expected to run in parallel;
     the assert below checks each pairing.  */
  for (i = 0, phi = phi_nodes (new_edge->dest);
       VEC_iterate (edge_var_map, v, i, vm) && phi;
       i++, phi = PHI_CHAIN (phi))
    {
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == PHI_RESULT (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  /* The queued arguments have been consumed; drop them.  */
  redirect_edge_var_map_clear (old_edge);
}
3113
3114 /* Returns the basic block after which the new basic block created
3115 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3116 near its "logical" location. This is of most help to humans looking
3117 at debugging dumps. */
3118
3119 static basic_block
3120 split_edge_bb_loc (edge edge_in)
3121 {
3122 basic_block dest = edge_in->dest;
3123
3124 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3125 return edge_in->src;
3126 else
3127 return dest->prev_bb;
3128 }
3129
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  The new block inherits EDGE_IN's count and
   frequency, falls through to the old destination, and receives the
   PHI arguments that were on EDGE_IN.  */

static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to DEST, so the new
     edge carries the whole count.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Point EDGE_IN at the new block; the redirection queues DEST's PHI
     arguments, which reinstall_phi_args then moves onto NEW_EDGE.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
3159
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.
   Returns the offending subtree (after calling error ()) on failure,
   or NULL to continue the walk.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types themselves need no verification; don't walk into them.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case ASSERT_EXPR:
      /* An always-false assertion should have been eliminated.  */
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* In GIMPLE, assignments are GIMPLE_MODIFY_STMT; a MODIFY_EXPR
	 must never appear here.  */
      gcc_unreachable ();

    case GIMPLE_MODIFY_STMT:
      x = GIMPLE_STMT_OPERAND (t, 0);
      if (TREE_CODE (x) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (x, 0)))
	{
	  error ("GIMPLE register modified with BIT_FIELD_REF");
	  return t;
	}
      break;

    case ADDR_EXPR:
      {
	bool old_constant;
	bool old_side_effects;
	bool new_constant;
	bool new_side_effects;

	gcc_assert (is_gimple_address (t));

	/* Verify the cached TREE_CONSTANT/TREE_SIDE_EFFECTS bits agree
	   with a fresh recomputation.  */
	old_constant = TREE_CONSTANT (t);
	old_side_effects = TREE_SIDE_EFFECTS (t);

	recompute_tree_invariant_for_addr_expr (t);
	new_side_effects = TREE_SIDE_EFFECTS (t);
	new_constant = TREE_CONSTANT (t);

	if (old_constant != new_constant)
	  {
	    error ("constant not recomputed when ADDR_EXPR changed");
	    return t;
	  }
	if (old_side_effects != new_side_effects)
	  {
	    error ("side effects not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
	  return NULL;
	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
      /* NON_LVALUE_EXPRs are stripped during gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      /* Operand 1 is the field size, operand 2 the bit position;
		 both must be host-representable unsigned constants.  */
	      if (!host_integerp (TREE_OPERAND (t, 1), 1)
		  || !host_integerp (TREE_OPERAND (t, 2), 1))
		{
		  error ("invalid position or size operand to BIT_FIELD_REF");
		  return t;
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
		       && (TYPE_PRECISION (TREE_TYPE (t))
			   != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("integral result type precision does not match "
			 "field size of BIT_FIELD_REF");
		  return t;
		}
	      if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		  && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		      != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
		{
		  error ("mode precision of non-integral result does not "
			 "match field size of BIT_FIELD_REF");
		  return t;
		}
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is an integer with type of
	 sizetype.  */
      if (!useless_type_conversion_p (sizetype,
				      TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer with type of sizetype.");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no per-element checking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3422
3423 /* Verifies if EXPR is a valid GIMPLE unary expression. Returns true
3424 if there is an error, otherwise false. */
3425
3426 static bool
3427 verify_gimple_unary_expr (const_tree expr)
3428 {
3429 tree op = TREE_OPERAND (expr, 0);
3430 tree type = TREE_TYPE (expr);
3431
3432 if (!is_gimple_val (op))
3433 {
3434 error ("invalid operand in unary expression");
3435 return true;
3436 }
3437
3438 /* For general unary expressions we have the operations type
3439 as the effective type the operation is carried out on. So all
3440 we need to require is that the operand is trivially convertible
3441 to that type. */
3442 if (!useless_type_conversion_p (type, TREE_TYPE (op)))
3443 {
3444 error ("type mismatch in unary expression");
3445 debug_generic_expr (type);
3446 debug_generic_expr (TREE_TYPE (op));
3447 return true;
3448 }
3449
3450 return false;
3451 }
3452
3453 /* Verifies if EXPR is a valid GIMPLE binary expression. Returns true
3454 if there is an error, otherwise false. */
3455
3456 static bool
3457 verify_gimple_binary_expr (const_tree expr)
3458 {
3459 tree op0 = TREE_OPERAND (expr, 0);
3460 tree op1 = TREE_OPERAND (expr, 1);
3461 tree type = TREE_TYPE (expr);
3462
3463 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3464 {
3465 error ("invalid operands in binary expression");
3466 return true;
3467 }
3468
3469 /* For general binary expressions we have the operations type
3470 as the effective type the operation is carried out on. So all
3471 we need to require is that both operands are trivially convertible
3472 to that type. */
3473 if (!useless_type_conversion_p (type, TREE_TYPE (op0))
3474 || !useless_type_conversion_p (type, TREE_TYPE (op1)))
3475 {
3476 error ("type mismatch in binary expression");
3477 debug_generic_stmt (type);
3478 debug_generic_stmt (TREE_TYPE (op0));
3479 debug_generic_stmt (TREE_TYPE (op1));
3480 return true;
3481 }
3482
3483 return false;
3484 }
3485
3486 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3487 Returns true if there is an error, otherwise false. */
3488
3489 static bool
3490 verify_gimple_min_lval (tree expr)
3491 {
3492 tree op;
3493
3494 if (is_gimple_id (expr))
3495 return false;
3496
3497 if (TREE_CODE (expr) != INDIRECT_REF
3498 && TREE_CODE (expr) != ALIGN_INDIRECT_REF
3499 && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF)
3500 {
3501 error ("invalid expression for min lvalue");
3502 return true;
3503 }
3504
3505 op = TREE_OPERAND (expr, 0);
3506 if (!is_gimple_val (op))
3507 {
3508 error ("invalid operand in indirect reference");
3509 debug_generic_stmt (op);
3510 return true;
3511 }
3512 if (!useless_type_conversion_p (TREE_TYPE (expr),
3513 TREE_TYPE (TREE_TYPE (op))))
3514 {
3515 error ("type mismatch in indirect reference");
3516 debug_generic_stmt (TREE_TYPE (expr));
3517 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3518 return true;
3519 }
3520
3521 return false;
3522 }
3523
3524 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3525 if there is an error, otherwise false. */
3526
3527 static bool
3528 verify_gimple_reference (tree expr)
3529 {
3530 while (handled_component_p (expr))
3531 {
3532 tree op = TREE_OPERAND (expr, 0);
3533
3534 if (TREE_CODE (expr) == ARRAY_REF
3535 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3536 {
3537 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3538 || (TREE_OPERAND (expr, 2)
3539 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3540 || (TREE_OPERAND (expr, 3)
3541 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3542 {
3543 error ("invalid operands to array reference");
3544 debug_generic_stmt (expr);
3545 return true;
3546 }
3547 }
3548
3549 /* Verify if the reference array element types are compatible. */
3550 if (TREE_CODE (expr) == ARRAY_REF
3551 && !useless_type_conversion_p (TREE_TYPE (expr),
3552 TREE_TYPE (TREE_TYPE (op))))
3553 {
3554 error ("type mismatch in array reference");
3555 debug_generic_stmt (TREE_TYPE (expr));
3556 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3557 return true;
3558 }
3559 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3560 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3561 TREE_TYPE (TREE_TYPE (op))))
3562 {
3563 error ("type mismatch in array range reference");
3564 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3565 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3566 return true;
3567 }
3568
3569 if ((TREE_CODE (expr) == REALPART_EXPR
3570 || TREE_CODE (expr) == IMAGPART_EXPR)
3571 && !useless_type_conversion_p (TREE_TYPE (expr),
3572 TREE_TYPE (TREE_TYPE (op))))
3573 {
3574 error ("type mismatch in real/imagpart reference");
3575 debug_generic_stmt (TREE_TYPE (expr));
3576 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3577 return true;
3578 }
3579
3580 if (TREE_CODE (expr) == COMPONENT_REF
3581 && !useless_type_conversion_p (TREE_TYPE (expr),
3582 TREE_TYPE (TREE_OPERAND (expr, 1))))
3583 {
3584 error ("type mismatch in component reference");
3585 debug_generic_stmt (TREE_TYPE (expr));
3586 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3587 return true;
3588 }
3589
3590 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3591 is nothing to verify. Gross mismatches at most invoke
3592 undefined behavior. */
3593
3594 expr = op;
3595 }
3596
3597 return verify_gimple_min_lval (expr);
3598 }
3599
3600 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3601 list of pointer-to types that is trivially convertible to DEST. */
3602
3603 static bool
3604 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3605 {
3606 tree src;
3607
3608 if (!TYPE_POINTER_TO (src_obj))
3609 return true;
3610
3611 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3612 if (useless_type_conversion_p (dest, src))
3613 return true;
3614
3615 return false;
3616 }
3617
3618 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3619 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3620
3621 static bool
3622 valid_fixed_convert_types_p (tree type1, tree type2)
3623 {
3624 return (FIXED_POINT_TYPE_P (type1)
3625 && (INTEGRAL_TYPE_P (type2)
3626 || SCALAR_FLOAT_TYPE_P (type2)
3627 || FIXED_POINT_TYPE_P (type2)));
3628 }
3629
3630 /* Verify the GIMPLE expression EXPR. Returns true if there is an
3631 error, otherwise false. */
3632
3633 static bool
3634 verify_gimple_expr (tree expr)
3635 {
3636 tree type = TREE_TYPE (expr);
3637
3638 if (is_gimple_val (expr))
3639 return false;
3640
3641 /* Special codes we cannot handle via their class. */
3642 switch (TREE_CODE (expr))
3643 {
3644 CASE_CONVERT:
3645 {
3646 tree op = TREE_OPERAND (expr, 0);
3647 if (!is_gimple_val (op))
3648 {
3649 error ("invalid operand in conversion");
3650 return true;
3651 }
3652
3653 /* Allow conversions between integral types and between
3654 pointer types. */
3655 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3656 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
3657 return false;
3658
3659 /* Allow conversions between integral types and pointers only if
3660 there is no sign or zero extension involved. */
3661 if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
3662 || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
3663 && (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op))
3664 /* For targets were the precision of sizetype doesn't
3665 match that of pointers we need the following. */
3666 || type == sizetype || TREE_TYPE (op) == sizetype))
3667 return false;
3668
3669 /* Allow conversion from integer to offset type and vice versa. */
3670 if ((TREE_CODE (type) == OFFSET_TYPE
3671 && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
3672 || (TREE_CODE (type) == INTEGER_TYPE
3673 && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
3674 return false;
3675
3676 /* Otherwise assert we are converting between types of the
3677 same kind. */
3678 if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
3679 {
3680 error ("invalid types in nop conversion");
3681 debug_generic_expr (type);
3682 debug_generic_expr (TREE_TYPE (op));
3683 return true;
3684 }
3685
3686 return false;
3687 }
3688
3689 case FIXED_CONVERT_EXPR:
3690 {
3691 tree op = TREE_OPERAND (expr, 0);
3692 if (!is_gimple_val (op))
3693 {
3694 error ("invalid operand in conversion");
3695 return true;
3696 }
3697
3698 if (!valid_fixed_convert_types_p (type, TREE_TYPE (op))
3699 && !valid_fixed_convert_types_p (TREE_TYPE (op), type))
3700 {
3701 error ("invalid types in fixed-point conversion");
3702 debug_generic_expr (type);
3703 debug_generic_expr (TREE_TYPE (op));
3704 return true;
3705 }
3706
3707 return false;
3708 }
3709
3710 case FLOAT_EXPR:
3711 {
3712 tree op = TREE_OPERAND (expr, 0);
3713 if (!is_gimple_val (op))
3714 {
3715 error ("invalid operand in int to float conversion");
3716 return true;
3717 }
3718 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3719 || !SCALAR_FLOAT_TYPE_P (type))
3720 {
3721 error ("invalid types in conversion to floating point");
3722 debug_generic_expr (type);
3723 debug_generic_expr (TREE_TYPE (op));
3724 return true;
3725 }
3726 return false;
3727 }
3728
3729 case FIX_TRUNC_EXPR:
3730 {
3731 tree op = TREE_OPERAND (expr, 0);
3732 if (!is_gimple_val (op))
3733 {
3734 error ("invalid operand in float to int conversion");
3735 return true;
3736 }
3737 if (!INTEGRAL_TYPE_P (type)
3738 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
3739 {
3740 error ("invalid types in conversion to integer");
3741 debug_generic_expr (type);
3742 debug_generic_expr (TREE_TYPE (op));
3743 return true;
3744 }
3745 return false;
3746 }
3747
3748 case COMPLEX_EXPR:
3749 {
3750 tree op0 = TREE_OPERAND (expr, 0);
3751 tree op1 = TREE_OPERAND (expr, 1);
3752 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3753 {
3754 error ("invalid operands in complex expression");
3755 return true;
3756 }
3757 if (!TREE_CODE (type) == COMPLEX_TYPE
3758 || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3759 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
3760 || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3761 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
3762 || !useless_type_conversion_p (TREE_TYPE (type),
3763 TREE_TYPE (op0))
3764 || !useless_type_conversion_p (TREE_TYPE (type),
3765 TREE_TYPE (op1)))
3766 {
3767 error ("type mismatch in complex expression");
3768 debug_generic_stmt (TREE_TYPE (expr));
3769 debug_generic_stmt (TREE_TYPE (op0));
3770 debug_generic_stmt (TREE_TYPE (op1));
3771 return true;
3772 }
3773 return false;
3774 }
3775
3776 case CONSTRUCTOR:
3777 {
3778 /* This is used like COMPLEX_EXPR but for vectors. */
3779 if (TREE_CODE (type) != VECTOR_TYPE)
3780 {
3781 error ("constructor not allowed for non-vector types");
3782 debug_generic_stmt (type);
3783 return true;
3784 }
3785 /* FIXME: verify constructor arguments. */
3786 return false;
3787 }
3788
3789 case LSHIFT_EXPR:
3790 case RSHIFT_EXPR:
3791 case LROTATE_EXPR:
3792 case RROTATE_EXPR:
3793 {
3794 tree op0 = TREE_OPERAND (expr, 0);
3795 tree op1 = TREE_OPERAND (expr, 1);
3796 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3797 {
3798 error ("invalid operands in shift expression");
3799 return true;
3800 }
3801 if (!TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
3802 || !useless_type_conversion_p (type, TREE_TYPE (op0)))
3803 {
3804 error ("type mismatch in shift expression");
3805 debug_generic_stmt (TREE_TYPE (expr));
3806 debug_generic_stmt (TREE_TYPE (op0));
3807 debug_generic_stmt (TREE_TYPE (op1));
3808 return true;
3809 }
3810 return false;
3811 }
3812
3813 case PLUS_EXPR:
3814 case MINUS_EXPR:
3815 {
3816 tree op0 = TREE_OPERAND (expr, 0);
3817 tree op1 = TREE_OPERAND (expr, 1);
3818 if (POINTER_TYPE_P (type)
3819 || POINTER_TYPE_P (TREE_TYPE (op0))
3820 || POINTER_TYPE_P (TREE_TYPE (op1)))
3821 {
3822 error ("invalid (pointer) operands to plus/minus");
3823 return true;
3824 }
3825 /* Continue with generic binary expression handling. */
3826 break;
3827 }
3828
3829 case POINTER_PLUS_EXPR:
3830 {
3831 tree op0 = TREE_OPERAND (expr, 0);
3832 tree op1 = TREE_OPERAND (expr, 1);
3833 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3834 {
3835 error ("invalid operands in pointer plus expression");
3836 return true;
3837 }
3838 if (!POINTER_TYPE_P (TREE_TYPE (op0))
3839 || !useless_type_conversion_p (type, TREE_TYPE (op0))
3840 || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
3841 {
3842 error ("type mismatch in pointer plus expression");
3843 debug_generic_stmt (type);
3844 debug_generic_stmt (TREE_TYPE (op0));
3845 debug_generic_stmt (TREE_TYPE (op1));
3846 return true;
3847 }
3848 return false;
3849 }
3850
3851 case COND_EXPR:
3852 {
3853 tree op0 = TREE_OPERAND (expr, 0);
3854 tree op1 = TREE_OPERAND (expr, 1);
3855 tree op2 = TREE_OPERAND (expr, 2);
3856 if ((!is_gimple_val (op1)
3857 && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
3858 || (!is_gimple_val (op2)
3859 && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
3860 {
3861 error ("invalid operands in conditional expression");
3862 return true;
3863 }
3864 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3865 || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
3866 && !useless_type_conversion_p (type, TREE_TYPE (op1)))
3867 || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
3868 && !useless_type_conversion_p (type, TREE_TYPE (op2))))
3869 {
3870 error ("type mismatch in conditional expression");
3871 debug_generic_stmt (type);
3872 debug_generic_stmt (TREE_TYPE (op0));
3873 debug_generic_stmt (TREE_TYPE (op1));
3874 debug_generic_stmt (TREE_TYPE (op2));
3875 return true;
3876 }
3877 return verify_gimple_expr (op0);
3878 }
3879
3880 case ADDR_EXPR:
3881 {
3882 tree op = TREE_OPERAND (expr, 0);
3883 if (!is_gimple_addressable (op))
3884 {
3885 error ("invalid operand in unary expression");
3886 return true;
3887 }
3888 if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
3889 /* FIXME: a longstanding wart, &a == &a[0]. */
3890 && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
3891 || !one_pointer_to_useless_type_conversion_p (type,
3892 TREE_TYPE (TREE_TYPE (op)))))
3893 {
3894 error ("type mismatch in address expression");
3895 debug_generic_stmt (TREE_TYPE (expr));
3896 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3897 return true;
3898 }
3899
3900 return verify_gimple_reference (op);
3901 }
3902
3903 case TRUTH_ANDIF_EXPR:
3904 case TRUTH_ORIF_EXPR:
3905 gcc_unreachable ();
3906
3907 case TRUTH_AND_EXPR:
3908 case TRUTH_OR_EXPR:
3909 case TRUTH_XOR_EXPR:
3910 {
3911 tree op0 = TREE_OPERAND (expr, 0);
3912 tree op1 = TREE_OPERAND (expr, 1);
3913
3914 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3915 {
3916 error ("invalid operands in truth expression");
3917 return true;
3918 }
3919
3920 /* We allow any kind of integral typed argument and result. */
3921 if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
3922 || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
3923 || !INTEGRAL_TYPE_P (type))
3924 {
3925 error ("type mismatch in binary truth expression");
3926 debug_generic_stmt (type);
3927 debug_generic_stmt (TREE_TYPE (op0));
3928 debug_generic_stmt (TREE_TYPE (op1));
3929 return true;
3930 }
3931
3932 return false;
3933 }
3934
3935 case TRUTH_NOT_EXPR:
3936 {
3937 tree op = TREE_OPERAND (expr, 0);
3938
3939 if (!is_gimple_val (op))
3940 {
3941 error ("invalid operand in unary not");
3942 return true;
3943 }
3944
3945 /* For TRUTH_NOT_EXPR we can have any kind of integral
3946 typed arguments and results. */
3947 if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
3948 || !INTEGRAL_TYPE_P (type))
3949 {
3950 error ("type mismatch in not expression");
3951 debug_generic_expr (TREE_TYPE (expr));
3952 debug_generic_expr (TREE_TYPE (op));
3953 return true;
3954 }
3955
3956 return false;
3957 }
3958
3959 case CALL_EXPR:
3960 /* FIXME. The C frontend passes unpromoted arguments in case it
3961 didn't see a function declaration before the call. */
3962 {
3963 tree decl = CALL_EXPR_FN (expr);
3964
3965 if (TREE_CODE (decl) == FUNCTION_DECL
3966 && DECL_LOOPING_CONST_OR_PURE_P (decl)
3967 && (!DECL_PURE_P (decl))
3968 && (!TREE_READONLY (decl)))
3969 {
3970 error ("invalid pure const state for function");
3971 return true;
3972 }
3973 return false;
3974 }
3975
3976 case OBJ_TYPE_REF:
3977 /* FIXME. */
3978 return false;
3979
3980 default:;
3981 }
3982
3983 /* Generic handling via classes. */
3984 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3985 {
3986 case tcc_unary:
3987 return verify_gimple_unary_expr (expr);
3988
3989 case tcc_binary:
3990 return verify_gimple_binary_expr (expr);
3991
3992 case tcc_reference:
3993 return verify_gimple_reference (expr);
3994
3995 case tcc_comparison:
3996 {
3997 tree op0 = TREE_OPERAND (expr, 0);
3998 tree op1 = TREE_OPERAND (expr, 1);
3999 if (!is_gimple_val (op0) || !is_gimple_val (op1))
4000 {
4001 error ("invalid operands in comparison expression");
4002 return true;
4003 }
4004 /* For comparisons we do not have the operations type as the
4005 effective type the comparison is carried out in. Instead
4006 we require that either the first operand is trivially
4007 convertible into the second, or the other way around.
4008 The resulting type of a comparison may be any integral type.
4009 Because we special-case pointers to void we allow
4010 comparisons of pointers with the same mode as well. */
4011 if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
4012 && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
4013 && (!POINTER_TYPE_P (TREE_TYPE (op0))
4014 || !POINTER_TYPE_P (TREE_TYPE (op1))
4015 || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
4016 || !INTEGRAL_TYPE_P (type))
4017 {
4018 error ("type mismatch in comparison expression");
4019 debug_generic_stmt (TREE_TYPE (expr));
4020 debug_generic_stmt (TREE_TYPE (op0));
4021 debug_generic_stmt (TREE_TYPE (op1));
4022 return true;
4023 }
4024 break;
4025 }
4026
4027 default:
4028 gcc_unreachable ();
4029 }
4030
4031 return false;
4032 }
4033
4034 /* Verify the GIMPLE assignment statement STMT. Returns true if there
4035 is an error, otherwise false. */
4036
4037 static bool
4038 verify_gimple_modify_stmt (const_tree stmt)
4039 {
4040 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
4041 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
4042
4043 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
4044
4045 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4046 TREE_TYPE (rhs)))
4047 {
4048 error ("non-trivial conversion at assignment");
4049 debug_generic_expr (TREE_TYPE (lhs));
4050 debug_generic_expr (TREE_TYPE (rhs));
4051 return true;
4052 }
4053
4054 /* Loads/stores from/to a variable are ok. */
4055 if ((is_gimple_val (lhs)
4056 && is_gimple_variable (rhs))
4057 || (is_gimple_val (rhs)
4058 && is_gimple_variable (lhs)))
4059 return false;
4060
4061 /* Aggregate copies are ok. */
4062 if (!is_gimple_reg_type (TREE_TYPE (lhs))
4063 && !is_gimple_reg_type (TREE_TYPE (rhs)))
4064 return false;
4065
4066 /* We might get 'loads' from a parameter which is not a gimple value. */
4067 if (TREE_CODE (rhs) == PARM_DECL)
4068 return verify_gimple_expr (lhs);
4069
4070 if (!is_gimple_variable (lhs)
4071 && verify_gimple_expr (lhs))
4072 return true;
4073
4074 if (!is_gimple_variable (rhs)
4075 && verify_gimple_expr (rhs))
4076 return true;
4077
4078 return false;
4079 }
4080
4081 /* Verify the GIMPLE statement STMT. Returns true if there is an
4082 error, otherwise false. */
4083
4084 static bool
4085 verify_gimple_stmt (tree stmt)
4086 {
4087 if (!is_gimple_stmt (stmt))
4088 {
4089 error ("is not a valid GIMPLE statement");
4090 return true;
4091 }
4092
4093 if (OMP_DIRECTIVE_P (stmt))
4094 {
4095 /* OpenMP directives are validated by the FE and never operated
4096 on by the optimizers. Furthermore, OMP_FOR may contain
4097 non-gimple expressions when the main index variable has had
4098 its address taken. This does not affect the loop itself
4099 because the header of an OMP_FOR is merely used to determine
4100 how to setup the parallel iteration. */
4101 return false;
4102 }
4103
4104 switch (TREE_CODE (stmt))
4105 {
4106 case GIMPLE_MODIFY_STMT:
4107 return verify_gimple_modify_stmt (stmt);
4108
4109 case GOTO_EXPR:
4110 case LABEL_EXPR:
4111 return false;
4112
4113 case SWITCH_EXPR:
4114 if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
4115 {
4116 error ("invalid operand to switch statement");
4117 debug_generic_expr (TREE_OPERAND (stmt, 0));
4118 }
4119 return false;
4120
4121 case RETURN_EXPR:
4122 {
4123 tree op = TREE_OPERAND (stmt, 0);
4124
4125 if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
4126 {
4127 error ("type error in return expression");
4128 return true;
4129 }
4130
4131 if (op == NULL_TREE
4132 || TREE_CODE (op) == RESULT_DECL)
4133 return false;
4134
4135 return verify_gimple_modify_stmt (op);
4136 }
4137
4138 case CALL_EXPR:
4139 case COND_EXPR:
4140 return verify_gimple_expr (stmt);
4141
4142 case NOP_EXPR:
4143 case CHANGE_DYNAMIC_TYPE_EXPR:
4144 case ASM_EXPR:
4145 case PREDICT_EXPR:
4146 return false;
4147
4148 default:
4149 gcc_unreachable ();
4150 }
4151 }
4152
4153 /* Verify the GIMPLE statements inside the statement list STMTS.
4154 Returns true if there were any errors. */
4155
4156 static bool
4157 verify_gimple_2 (tree stmts)
4158 {
4159 tree_stmt_iterator tsi;
4160 bool err = false;
4161
4162 for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
4163 {
4164 tree stmt = tsi_stmt (tsi);
4165
4166 switch (TREE_CODE (stmt))
4167 {
4168 case BIND_EXPR:
4169 err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
4170 break;
4171
4172 case TRY_CATCH_EXPR:
4173 case TRY_FINALLY_EXPR:
4174 err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
4175 err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
4176 break;
4177
4178 case CATCH_EXPR:
4179 err |= verify_gimple_2 (CATCH_BODY (stmt));
4180 break;
4181
4182 case EH_FILTER_EXPR:
4183 err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
4184 break;
4185
4186 default:
4187 {
4188 bool err2 = verify_gimple_stmt (stmt);
4189 if (err2)
4190 debug_generic_expr (stmt);
4191 err |= err2;
4192 }
4193 }
4194 }
4195
4196 return err;
4197 }
4198
4199
4200 /* Verify the GIMPLE statements inside the statement list STMTS. */
4201
4202 void
4203 verify_gimple_1 (tree stmts)
4204 {
4205 if (verify_gimple_2 (stmts))
4206 internal_error ("verify_gimple failed");
4207 }
4208
4209 /* Verify the GIMPLE statements inside the current function. */
4210
4211 void
4212 verify_gimple (void)
4213 {
4214 verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
4215 }
4216
4217 /* Verify STMT, return true if STMT is not in GIMPLE form.
4218 TODO: Implement type checking. */
4219
4220 static bool
4221 verify_stmt (tree stmt, bool last_in_block)
4222 {
4223 tree addr;
4224
4225 if (OMP_DIRECTIVE_P (stmt))
4226 {
4227 /* OpenMP directives are validated by the FE and never operated
4228 on by the optimizers. Furthermore, OMP_FOR may contain
4229 non-gimple expressions when the main index variable has had
4230 its address taken. This does not affect the loop itself
4231 because the header of an OMP_FOR is merely used to determine
4232 how to setup the parallel iteration. */
4233 return false;
4234 }
4235
4236 if (!is_gimple_stmt (stmt))
4237 {
4238 error ("is not a valid GIMPLE statement");
4239 goto fail;
4240 }
4241
4242 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
4243 if (addr)
4244 {
4245 debug_generic_stmt (addr);
4246 if (addr != stmt)
4247 {
4248 inform ("in statement");
4249 debug_generic_stmt (stmt);
4250 }
4251 return true;
4252 }
4253
4254 /* If the statement is marked as part of an EH region, then it is
4255 expected that the statement could throw. Verify that when we
4256 have optimizations that simplify statements such that we prove
4257 that they cannot throw, that we update other data structures
4258 to match. */
4259 if (lookup_stmt_eh_region (stmt) >= 0)
4260 {
4261 if (!tree_could_throw_p (stmt))
4262 {
4263 error ("statement marked for throw, but doesn%'t");
4264 goto fail;
4265 }
4266 if (!last_in_block && tree_can_throw_internal (stmt))
4267 {
4268 error ("statement marked for throw in middle of block");
4269 goto fail;
4270 }
4271 }
4272
4273 return false;
4274
4275 fail:
4276 debug_generic_stmt (stmt);
4277 return true;
4278 }
4279
4280
4281 /* Return true when the T can be shared. */
4282
4283 static bool
4284 tree_node_can_be_shared (tree t)
4285 {
4286 if (IS_TYPE_OR_DECL_P (t)
4287 || is_gimple_min_invariant (t)
4288 || TREE_CODE (t) == SSA_NAME
4289 || t == error_mark_node
4290 || TREE_CODE (t) == IDENTIFIER_NODE)
4291 return true;
4292
4293 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4294 return true;
4295
4296 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4297 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4298 || TREE_CODE (t) == COMPONENT_REF
4299 || TREE_CODE (t) == REALPART_EXPR
4300 || TREE_CODE (t) == IMAGPART_EXPR)
4301 t = TREE_OPERAND (t, 0);
4302
4303 if (DECL_P (t))
4304 return true;
4305
4306 return false;
4307 }
4308
4309
4310 /* Called via walk_trees. Verify tree sharing. */
4311
4312 static tree
4313 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
4314 {
4315 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4316
4317 if (tree_node_can_be_shared (*tp))
4318 {
4319 *walk_subtrees = false;
4320 return NULL;
4321 }
4322
4323 if (pointer_set_insert (visited, *tp))
4324 return *tp;
4325
4326 return NULL;
4327 }
4328
4329
4330 /* Helper function for verify_gimple_tuples. */
4331
4332 static tree
4333 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4334 void *data ATTRIBUTE_UNUSED)
4335 {
4336 switch (TREE_CODE (*tp))
4337 {
4338 case MODIFY_EXPR:
4339 error ("unexpected non-tuple");
4340 debug_tree (*tp);
4341 gcc_unreachable ();
4342 return NULL_TREE;
4343
4344 default:
4345 return NULL_TREE;
4346 }
4347 }
4348
4349 /* Verify that there are no trees that should have been converted to
4350 gimple tuples. Return true if T contains a node that should have
4351 been converted to a gimple tuple, but hasn't. */
4352
4353 static bool
4354 verify_gimple_tuples (tree t)
4355 {
4356 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
4357 }
4358
4359 static bool eh_error_found;
4360 static int
4361 verify_eh_throw_stmt_node (void **slot, void *data)
4362 {
4363 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4364 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4365
4366 if (!pointer_set_contains (visited, node->stmt))
4367 {
4368 error ("Dead STMT in EH table");
4369 debug_generic_stmt (node->stmt);
4370 eh_error_found = true;
4371 }
4372 return 0;
4373 }
4374
/* Verify the GIMPLE statement chain: walks every basic block of the
   current function, checking PHI nodes and statements for validity,
   tree-sharing violations and stale EH-table entries.  Calls
   internal_error on any failure.  */

void
verify_stmts (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool err = false;
  struct pointer_set_t *visited, *visited_stmts;
  tree addr;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* VISITED collects tree nodes for the sharing check; VISITED_STMTS
     records every PHI and statement seen so the EH throw table can be
     checked for entries referring to dead statements afterwards.  */
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  FOR_EACH_BB (bb)
    {
      tree phi;
      int i;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int phi_num_args = PHI_NUM_ARGS (phi);

	  pointer_set_insert (visited_stmts, phi);
	  if (bb_for_stmt (phi) != bb)
	    {
	      error ("bb_for_stmt (phi) is set to a wrong basic block");
	      err |= true;
	    }

	  /* Check each PHI argument: it must exist, be a GIMPLE value,
	     and not share tree nodes with other statements.  */
	  for (i = 0; i < phi_num_args; i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      tree addr;

	      if (!t)
		{
		  error ("missing PHI def");
		  debug_generic_stmt (phi);
		  err |= true;
		  continue;
		}
	      /* Addressable variables do have SSA_NAMEs but they
		 are not considered gimple values.  */
	      else if (TREE_CODE (t) != SSA_NAME
		       && TREE_CODE (t) != FUNCTION_DECL
		       && !is_gimple_min_invariant (t))
		{
		  error ("PHI def is not a GIMPLE value");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (t);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_node_sharing, visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (addr);
		  err |= true;
		}
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);

	  pointer_set_insert (visited_stmts, stmt);
	  err |= verify_gimple_tuples (stmt);

	  if (bb_for_stmt (stmt) != bb)
	    {
	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
	      err |= true;
	    }

	  /* Advance the iterator before verifying so we can tell
	     verify_stmt whether STMT is the last statement in the
	     block (internal throws are only allowed there).  */
	  bsi_next (&bsi);
	  err |= verify_stmt (stmt, bsi_end_p (bsi));
	  addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_stmt (stmt);
	      debug_generic_stmt (addr);
	      err |= true;
	    }
	}
    }
  /* Any EH-table entry whose statement was not visited above refers
     to a dead statement.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err | eh_error_found)
    internal_error ("verify_stmts failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4480
4481
4482 /* Verifies that the flow information is OK. */
4483
4484 static int
4485 tree_verify_flow_info (void)
4486 {
4487 int err = 0;
4488 basic_block bb;
4489 block_stmt_iterator bsi;
4490 tree stmt;
4491 edge e;
4492 edge_iterator ei;
4493
4494 if (ENTRY_BLOCK_PTR->il.tree)
4495 {
4496 error ("ENTRY_BLOCK has IL associated with it");
4497 err = 1;
4498 }
4499
4500 if (EXIT_BLOCK_PTR->il.tree)
4501 {
4502 error ("EXIT_BLOCK has IL associated with it");
4503 err = 1;
4504 }
4505
4506 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4507 if (e->flags & EDGE_FALLTHRU)
4508 {
4509 error ("fallthru to exit from bb %d", e->src->index);
4510 err = 1;
4511 }
4512
4513 FOR_EACH_BB (bb)
4514 {
4515 bool found_ctrl_stmt = false;
4516
4517 stmt = NULL_TREE;
4518
4519 /* Skip labels on the start of basic block. */
4520 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4521 {
4522 tree prev_stmt = stmt;
4523
4524 stmt = bsi_stmt (bsi);
4525
4526 if (TREE_CODE (stmt) != LABEL_EXPR)
4527 break;
4528
4529 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
4530 {
4531 error ("nonlocal label ");
4532 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4533 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4534 bb->index);
4535 err = 1;
4536 }
4537
4538 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
4539 {
4540 error ("label ");
4541 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4542 fprintf (stderr, " to block does not match in bb %d",
4543 bb->index);
4544 err = 1;
4545 }
4546
4547 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
4548 != current_function_decl)
4549 {
4550 error ("label ");
4551 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4552 fprintf (stderr, " has incorrect context in bb %d",
4553 bb->index);
4554 err = 1;
4555 }
4556 }
4557
4558 /* Verify that body of basic block BB is free of control flow. */
4559 for (; !bsi_end_p (bsi); bsi_next (&bsi))
4560 {
4561 tree stmt = bsi_stmt (bsi);
4562
4563 if (found_ctrl_stmt)
4564 {
4565 error ("control flow in the middle of basic block %d",
4566 bb->index);
4567 err = 1;
4568 }
4569
4570 if (stmt_ends_bb_p (stmt))
4571 found_ctrl_stmt = true;
4572
4573 if (TREE_CODE (stmt) == LABEL_EXPR)
4574 {
4575 error ("label ");
4576 print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
4577 fprintf (stderr, " in the middle of basic block %d", bb->index);
4578 err = 1;
4579 }
4580 }
4581
4582 bsi = bsi_last (bb);
4583 if (bsi_end_p (bsi))
4584 continue;
4585
4586 stmt = bsi_stmt (bsi);
4587
4588 err |= verify_eh_edges (stmt);
4589
4590 if (is_ctrl_stmt (stmt))
4591 {
4592 FOR_EACH_EDGE (e, ei, bb->succs)
4593 if (e->flags & EDGE_FALLTHRU)
4594 {
4595 error ("fallthru edge after a control statement in bb %d",
4596 bb->index);
4597 err = 1;
4598 }
4599 }
4600
4601 if (TREE_CODE (stmt) != COND_EXPR)
4602 {
4603 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
4604 after anything else but if statement. */
4605 FOR_EACH_EDGE (e, ei, bb->succs)
4606 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4607 {
4608 error ("true/false edge after a non-COND_EXPR in bb %d",
4609 bb->index);
4610 err = 1;
4611 }
4612 }
4613
4614 switch (TREE_CODE (stmt))
4615 {
4616 case COND_EXPR:
4617 {
4618 edge true_edge;
4619 edge false_edge;
4620
4621 if (COND_EXPR_THEN (stmt) != NULL_TREE
4622 || COND_EXPR_ELSE (stmt) != NULL_TREE)
4623 {
4624 error ("COND_EXPR with code in branches at the end of bb %d",
4625 bb->index);
4626 err = 1;
4627 }
4628
4629 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4630
4631 if (!true_edge || !false_edge
4632 || !(true_edge->flags & EDGE_TRUE_VALUE)
4633 || !(false_edge->flags & EDGE_FALSE_VALUE)
4634 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4635 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4636 || EDGE_COUNT (bb->succs) >= 3)
4637 {
4638 error ("wrong outgoing edge flags at end of bb %d",
4639 bb->index);
4640 err = 1;
4641 }
4642 }
4643 break;
4644
4645 case GOTO_EXPR:
4646 if (simple_goto_p (stmt))
4647 {
4648 error ("explicit goto at end of bb %d", bb->index);
4649 err = 1;
4650 }
4651 else
4652 {
4653 /* FIXME. We should double check that the labels in the
4654 destination blocks have their address taken. */
4655 FOR_EACH_EDGE (e, ei, bb->succs)
4656 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4657 | EDGE_FALSE_VALUE))
4658 || !(e->flags & EDGE_ABNORMAL))
4659 {
4660 error ("wrong outgoing edge flags at end of bb %d",
4661 bb->index);
4662 err = 1;
4663 }
4664 }
4665 break;
4666
4667 case RETURN_EXPR:
4668 if (!single_succ_p (bb)
4669 || (single_succ_edge (bb)->flags
4670 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4671 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4672 {
4673 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4674 err = 1;
4675 }
4676 if (single_succ (bb) != EXIT_BLOCK_PTR)
4677 {
4678 error ("return edge does not point to exit in bb %d",
4679 bb->index);
4680 err = 1;
4681 }
4682 break;
4683
4684 case SWITCH_EXPR:
4685 {
4686 tree prev;
4687 edge e;
4688 size_t i, n;
4689 tree vec;
4690
4691 vec = SWITCH_LABELS (stmt);
4692 n = TREE_VEC_LENGTH (vec);
4693
4694 /* Mark all the destination basic blocks. */
4695 for (i = 0; i < n; ++i)
4696 {
4697 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4698 basic_block label_bb = label_to_block (lab);
4699
4700 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4701 label_bb->aux = (void *)1;
4702 }
4703
4704 /* Verify that the case labels are sorted. */
4705 prev = TREE_VEC_ELT (vec, 0);
4706 for (i = 1; i < n; ++i)
4707 {
4708 tree c = TREE_VEC_ELT (vec, i);
4709 if (! CASE_LOW (c))
4710 {
4711 if (i != n - 1)
4712 {
4713 error ("found default case not at end of case vector");
4714 err = 1;
4715 }
4716 continue;
4717 }
4718 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4719 {
4720 error ("case labels not sorted: ");
4721 print_generic_expr (stderr, prev, 0);
4722 fprintf (stderr," is greater than ");
4723 print_generic_expr (stderr, c, 0);
4724 fprintf (stderr," but comes before it.\n");
4725 err = 1;
4726 }
4727 prev = c;
4728 }
4729 /* VRP will remove the default case if it can prove it will
4730 never be executed. So do not verify there always exists
4731 a default case here. */
4732
4733 FOR_EACH_EDGE (e, ei, bb->succs)
4734 {
4735 if (!e->dest->aux)
4736 {
4737 error ("extra outgoing edge %d->%d",
4738 bb->index, e->dest->index);
4739 err = 1;
4740 }
4741 e->dest->aux = (void *)2;
4742 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4743 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4744 {
4745 error ("wrong outgoing edge flags at end of bb %d",
4746 bb->index);
4747 err = 1;
4748 }
4749 }
4750
4751 /* Check that we have all of them. */
4752 for (i = 0; i < n; ++i)
4753 {
4754 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
4755 basic_block label_bb = label_to_block (lab);
4756
4757 if (label_bb->aux != (void *)2)
4758 {
4759 error ("missing edge %i->%i",
4760 bb->index, label_bb->index);
4761 err = 1;
4762 }
4763 }
4764
4765 FOR_EACH_EDGE (e, ei, bb->succs)
4766 e->dest->aux = (void *)0;
4767 }
4768
4769 default: ;
4770 }
4771 }
4772
4773 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4774 verify_dominators (CDI_DOMINATORS);
4775
4776 return err;
4777 }
4778
4779
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

static void
tree_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree phi, new_phi, var;

  /* DUMMY is the new forwarder block; BB is its (fallthru) successor.  */
  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor BB cannot have PHI nodes needing
     adjustment, so there is nothing to do.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
    {
      var = PHI_RESULT (phi);
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      /* The PHI in DUMMY now defines a fresh SSA name, which feeds
	 the new PHI in BB along the FALLTHRU edge.  */
      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
    }

  /* Ensure that the PHI node chain is in the same order.  */
  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
4820
4821
4822 /* Return a non-special label in the head of basic block BLOCK.
4823 Create one if it doesn't exist. */
4824
4825 tree
4826 tree_block_label (basic_block bb)
4827 {
4828 block_stmt_iterator i, s = bsi_start (bb);
4829 bool first = true;
4830 tree label, stmt;
4831
4832 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4833 {
4834 stmt = bsi_stmt (i);
4835 if (TREE_CODE (stmt) != LABEL_EXPR)
4836 break;
4837 label = LABEL_EXPR_LABEL (stmt);
4838 if (!DECL_NONLOCAL (label))
4839 {
4840 if (!first)
4841 bsi_move_before (&i, &s);
4842 return label;
4843 }
4844 }
4845
4846 label = create_artificial_label ();
4847 stmt = build1 (LABEL_EXPR, void_type_node, label);
4848 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4849 return label;
4850 }
4851
4852
4853 /* Attempt to perform edge redirection by replacing a possibly complex
4854 jump instruction by a goto or by removing the jump completely.
4855 This can apply only if all edges now point to the same block. The
4856 parameters and return values are equivalent to
4857 redirect_edge_and_branch. */
4858
4859 static edge
4860 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4861 {
4862 basic_block src = e->src;
4863 block_stmt_iterator b;
4864 tree stmt;
4865
4866 /* We can replace or remove a complex jump only when we have exactly
4867 two edges. */
4868 if (EDGE_COUNT (src->succs) != 2
4869 /* Verify that all targets will be TARGET. Specifically, the
4870 edge that is not E must also go to TARGET. */
4871 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4872 return NULL;
4873
4874 b = bsi_last (src);
4875 if (bsi_end_p (b))
4876 return NULL;
4877 stmt = bsi_stmt (b);
4878
4879 if (TREE_CODE (stmt) == COND_EXPR
4880 || TREE_CODE (stmt) == SWITCH_EXPR)
4881 {
4882 bsi_remove (&b, true);
4883 e = ssa_redirect_edge (e, target);
4884 e->flags = EDGE_FALLTHRU;
4885 return e;
4886 }
4887
4888 return NULL;
4889 }
4890
4891
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
tree_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  block_stmt_iterator bsi;
  edge ret;
  tree stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* First try to get rid of the controlling statement altogether, in
     case both successors of BB would end up in DEST.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
    return ret;

  /* Nothing to do if the destination does not actually change.  */
  if (e->dest == dest)
    return NULL;

  bsi = bsi_last (bb);
  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);

  /* Adjust the statement ending BB, if any, before redirecting.  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case COND_EXPR:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GOTO_EXPR:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      {
	tree cases = get_cases_for_edge (e, stmt);
	tree label = tree_block_label (dest);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's list at DEST's label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = TREE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		/* Splice E's case list onto the front of E2's.  */
		TREE_CHAIN (last) = TREE_CHAIN (cases2);
		TREE_CHAIN (cases2) = first;
	      }
	  }
	else
	  {
	    /* No cached case list: walk the whole case vector and
	       rewrite every label that pointed at the old dest.  */
	    tree vec = SWITCH_LABELS (stmt);
	    size_t i, n = TREE_VEC_LENGTH (vec);

	    for (i = 0; i < n; i++)
	      {
		tree elt = TREE_VEC_ELT (vec, i);

		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }

	break;
      }

    case RETURN_EXPR:
      /* Drop the return statement; E becomes a plain fallthru edge
	 into DEST.  */
      bsi_remove (&bsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case OMP_RETURN:
    case OMP_CONTINUE:
    case OMP_SECTIONS_SWITCH:
    case OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5000
5001 /* Returns true if it is possible to remove edge E by redirecting
5002 it to the destination of the other edge from E->src. */
5003
5004 static bool
5005 tree_can_remove_branch_p (const_edge e)
5006 {
5007 if (e->flags & EDGE_ABNORMAL)
5008 return false;
5009
5010 return true;
5011 }
5012
5013 /* Simple wrapper, as we can always redirect fallthru edges. */
5014
5015 static basic_block
5016 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
5017 {
5018 e = tree_redirect_edge_and_branch (e, dest);
5019 gcc_assert (e);
5020
5021 return NULL;
5022 }
5023
5024
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

static basic_block
tree_split_block (basic_block bb, void *stmt)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator tsi_tgt;
  tree act, list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting "after a label" is treated as splitting just after the
     block's labels.  */
  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
    stmt = NULL;

  /* Move everything from BSI to the new basic block.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      act = bsi_stmt (bsi);
      if (TREE_CODE (act) == LABEL_EXPR)
	continue;

      /* STMT == NULL: split right here, after the labels.  */
      if (!stmt)
	break;

      if (stmt == act)
	{
	  /* STMT itself stays in BB; the split happens after it.  */
	  bsi_next (&bsi);
	  break;
	}
    }

  /* Nothing follows the split point; NEW_BB stays empty.  */
  if (bsi_end_p (bsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  list = tsi_split_statement_list_before (&bsi.tsi);
  set_bb_stmt_list (new_bb, list);
  for (tsi_tgt = tsi_start (list);
       !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
    change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);

  return new_bb;
}
5081
5082
5083 /* Moves basic block BB after block AFTER. */
5084
5085 static bool
5086 tree_move_block_after (basic_block bb, basic_block after)
5087 {
5088 if (bb->prev_bb == after)
5089 return true;
5090
5091 unlink_block (bb);
5092 link_block (bb, after);
5093
5094 return true;
5095 }
5096
5097
5098 /* Return true if basic_block can be duplicated. */
5099
5100 static bool
5101 tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5102 {
5103 return true;
5104 }
5105
5106
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  */

static basic_block
tree_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  block_stmt_iterator bsi, bsi_tgt;
  tree phi;

  /* Place the copy just before the exit block.  */
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
    }

  /* Keep the chain of PHI nodes in the same order so that they can be
     updated by ssa_redirect_edge.  */
  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));

  bsi_tgt = bsi_start (new_bb);
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree stmt, copy;
      int region;

      stmt = bsi_stmt (bsi);
      /* Labels are not copied into the duplicate block.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = unshare_expr (stmt);
      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
      copy_virtual_operands (copy, stmt);
      /* Keep the copy in the same EH region as the original, if the
	 original belonged to one.  */
      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	add_stmt_to_eh_region (copy, region);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5161
/* Adds phi node arguments for edge E_COPY after basic block duplication.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  tree phi, phi_copy, phi_next, def;

  /* No PHIs at the destination means nothing needs an argument.  */
  if (!phi_nodes (e_copy->dest))
    return;

  /* BB is the original of E_COPY's source (or the source itself if it
     was not duplicated).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  /* Likewise for the destination.  */
  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  /* Find the original edge E that corresponds to E_COPY.  */
  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHI chains of the original and the copy in lockstep and
     carry each argument on E over to E_COPY.  */
  for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
       phi;
       phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
    {
      phi_next = PHI_CHAIN (phi);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy);
    }
}
5207
5208
5209 /* Basic block BB_COPY was created by code duplication. Add phi node
5210 arguments for edges going out of BB_COPY. The blocks that were
5211 duplicated have BB_DUPLICATED set. */
5212
5213 void
5214 add_phi_args_after_copy_bb (basic_block bb_copy)
5215 {
5216 edge_iterator ei;
5217 edge e_copy;
5218
5219 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5220 {
5221 add_phi_args_after_copy_edge (e_copy);
5222 }
5223 }
5224
5225 /* Blocks in REGION_COPY array of length N_REGION were created by
5226 duplication of basic blocks. Add phi node arguments for edges
5227 going from these blocks. If E_COPY is not NULL, also add
5228 phi node arguments for its destination.*/
5229
5230 void
5231 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5232 edge e_copy)
5233 {
5234 unsigned i;
5235
5236 for (i = 0; i < n_region; i++)
5237 region_copy[i]->flags |= BB_DUPLICATED;
5238
5239 for (i = 0; i < n_region; i++)
5240 add_phi_args_after_copy_bb (region_copy[i]);
5241 if (e_copy)
5242 add_phi_args_after_copy_edge (e_copy);
5243
5244 for (i = 0; i < n_region; i++)
5245 region_copy[i]->flags &= ~BB_DUPLICATED;
5246 }
5247
5248 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5249 important exit edge EXIT. By important we mean that no SSA name defined
5250 inside region is live over the other exit edges of the region. All entry
5251 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5252 to the duplicate of the region. SSA form, dominance and loop information
5253 is updated. The new basic blocks are stored to REGION_COPY in the same
5254 order as they had in REGION, provided that REGION_COPY is not NULL.
5255 The function returns false if it is unable to copy the region,
5256 true otherwise. */
5257
5258 bool
5259 tree_duplicate_sese_region (edge entry, edge exit,
5260 basic_block *region, unsigned n_region,
5261 basic_block *region_copy)
5262 {
5263 unsigned i;
5264 bool free_region_copy = false, copying_header = false;
5265 struct loop *loop = entry->dest->loop_father;
5266 edge exit_copy;
5267 VEC (basic_block, heap) *doms;
5268 edge redirected;
5269 int total_freq = 0, entry_freq = 0;
5270 gcov_type total_count = 0, entry_count = 0;
5271
5272 if (!can_copy_bbs_p (region, n_region))
5273 return false;
5274
5275 /* Some sanity checking. Note that we do not check for all possible
5276 missuses of the functions. I.e. if you ask to copy something weird,
5277 it will work, but the state of structures probably will not be
5278 correct. */
5279 for (i = 0; i < n_region; i++)
5280 {
5281 /* We do not handle subloops, i.e. all the blocks must belong to the
5282 same loop. */
5283 if (region[i]->loop_father != loop)
5284 return false;
5285
5286 if (region[i] != entry->dest
5287 && region[i] == loop->header)
5288 return false;
5289 }
5290
5291 set_loop_copy (loop, loop);
5292
5293 /* In case the function is used for loop header copying (which is the primary
5294 use), ensure that EXIT and its copy will be new latch and entry edges. */
5295 if (loop->header == entry->dest)
5296 {
5297 copying_header = true;
5298 set_loop_copy (loop, loop_outer (loop));
5299
5300 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5301 return false;
5302
5303 for (i = 0; i < n_region; i++)
5304 if (region[i] != exit->src
5305 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5306 return false;
5307 }
5308
5309 if (!region_copy)
5310 {
5311 region_copy = XNEWVEC (basic_block, n_region);
5312 free_region_copy = true;
5313 }
5314
5315 gcc_assert (!need_ssa_update_p ());
5316
5317 /* Record blocks outside the region that are dominated by something
5318 inside. */
5319 doms = NULL;
5320 initialize_original_copy_tables ();
5321
5322 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5323
5324 if (entry->dest->count)
5325 {
5326 total_count = entry->dest->count;
5327 entry_count = entry->count;
5328 /* Fix up corner cases, to avoid division by zero or creation of negative
5329 frequencies. */
5330 if (entry_count > total_count)
5331 entry_count = total_count;
5332 }
5333 else
5334 {
5335 total_freq = entry->dest->frequency;
5336 entry_freq = EDGE_FREQUENCY (entry);
5337 /* Fix up corner cases, to avoid division by zero or creation of negative
5338 frequencies. */
5339 if (total_freq == 0)
5340 total_freq = 1;
5341 else if (entry_freq > total_freq)
5342 entry_freq = total_freq;
5343 }
5344
5345 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5346 split_edge_bb_loc (entry));
5347 if (total_count)
5348 {
5349 scale_bbs_frequencies_gcov_type (region, n_region,
5350 total_count - entry_count,
5351 total_count);
5352 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5353 total_count);
5354 }
5355 else
5356 {
5357 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5358 total_freq);
5359 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5360 }
5361
5362 if (copying_header)
5363 {
5364 loop->header = exit->dest;
5365 loop->latch = exit->src;
5366 }
5367
5368 /* Redirect the entry and add the phi node arguments. */
5369 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5370 gcc_assert (redirected != NULL);
5371 flush_pending_stmts (entry);
5372
5373 /* Concerning updating of dominators: We must recount dominators
5374 for entry block and its copy. Anything that is outside of the
5375 region, but was dominated by something inside needs recounting as
5376 well. */
5377 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5378 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5379 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5380 VEC_free (basic_block, heap, doms);
5381
5382 /* Add the other PHI node arguments. */
5383 add_phi_args_after_copy (region_copy, n_region, NULL);
5384
5385 /* Update the SSA web. */
5386 update_ssa (TODO_update_ssa);
5387
5388 if (free_region_copy)
5389 free (region_copy);
5390
5391 free_original_copy_tables ();
5392 return true;
5393 }
5394
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
tree_duplicate_sese_tail (edge entry, edge exit,
			  basic_block *region, unsigned n_region,
			  basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  VEC (basic_block, heap) *doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  block_stmt_iterator bsi;
  tree cond;
  edge sorig, snew;

  /* EXIT's source must end in a two-way branch; EXITS holds EXIT and
     its sibling edge.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird
     (e.g., in the example, if there is a jump from inside to the middle
     of some_code, or come_code defines some of the values used in cond)
     it will work, but the resulting code will not be correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != orig_loop)
	return false;

      if (region[i] == orig_loop->latch)
	return false;
    }

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p ());

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  /* Compute either profile counts or frequencies for splitting the
     execution weight between the region and its copy.  */
  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  /* Reuse ENTRY's source as the switch block if it does not already
     end in a control statement; otherwise split the entry edge.  */
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  /* Move a copy of the exit condition into the switch block.  */
  bsi = bsi_last (switch_bb);
  cond = last_stmt (exit->src);
  gcc_assert (TREE_CODE (cond) == COND_EXPR);
  bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);

  /* The existing successor keeps the non-exit flags; a new edge to
     the copied region takes the exit edge's flags.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL_TREE;
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL_TREE;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  VEC_free (basic_block, heap, doms);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5564
5565 /*
5566 DEF_VEC_P(basic_block);
5567 DEF_VEC_ALLOC_P(basic_block,heap);
5568 */
5569
5570 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5571 adding blocks when the dominator traversal reaches EXIT. This
5572 function silently assumes that ENTRY strictly dominates EXIT. */
5573
5574 void
5575 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5576 VEC(basic_block,heap) **bbs_p)
5577 {
5578 basic_block son;
5579
5580 for (son = first_dom_son (CDI_DOMINATORS, entry);
5581 son;
5582 son = next_dom_son (CDI_DOMINATORS, son))
5583 {
5584 VEC_safe_push (basic_block, heap, *bbs_p, son);
5585 if (son != exit)
5586 gather_blocks_in_sese_region (son, exit, bbs_p);
5587 }
5588 }
5589
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);
  void **loc;

  /* Nothing to do if the decl already belongs to TO_CONTEXT.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  loc = pointer_map_contains (vars_map, t);

  if (!loc)
    {
      /* First time we see T: create the duplicate and cache it in
	 VARS_MAP so later references reuse the same copy.  */
      loc = pointer_map_insert (vars_map, t);

      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  /* Register the duplicate among TO_CONTEXT's local decls.  */
	  f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
	}
      else
	{
	  /* Only variables and CONST_DECLs are expected here.  */
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      *loc = new_t;
    }
  else
    new_t = *loc;

  *tp = new_t;
}
5629
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, struct pointer_map_t *vars_map,
		  tree to_context)
{
  void **loc;
  tree new_name, decl = SSA_NAME_VAR (name);

  /* Only GIMPLE registers are handled here.  */
  gcc_assert (is_gimple_reg (name));

  loc = pointer_map_contains (vars_map, name);

  if (!loc)
    {
      /* First duplicate the underlying variable ...  */
      replace_by_duplicate_decl (&decl, vars_map, to_context);

      /* ... then create the new SSA name while TO_CONTEXT's function
	 is the current one, so the name is allocated there.  */
      push_cfun (DECL_STRUCT_FUNCTION (to_context));
      if (gimple_in_ssa_p (cfun))
	add_referenced_var (decl);

      new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
      if (SSA_NAME_IS_DEFAULT_DEF (name))
	set_default_def (decl, new_name);
      pop_cfun ();

      /* Cache the mapping for later references to NAME.  */
      loc = pointer_map_insert (vars_map, name);
      *loc = new_name;
    }
  else
    new_name = *loc;

  return new_name;
}
5665
/* Context for move_stmt_r: describes how statements are relocated
   from one function to another.  */

struct move_stmt_d
{
  /* BLOCK to install as TREE_BLOCK on every moved expression.  */
  tree block;
  /* Function decl the statements come from (presumably consulted by
     callers; not read in the visible walker paths — confirm).  */
  tree from_context;
  /* Function decl the statements are being moved into.  */
  tree to_context;
  /* Maps old decls and SSA names to their duplicates in TO_CONTEXT.  */
  struct pointer_map_t *vars_map;
  /* Maps old LABEL_DECLs to their replacements, or NULL.  */
  htab_t new_label_map;
  /* True when local decls should be remapped; cleared inside OMP
     directive bodies.  */
  bool remap_decls_p;
};
5675
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP and change the DECL_CONTEXT of every local
   variable referenced in *TP.  DATA is a struct move_stmt_d
   describing the move; WALK_SUBTREES is cleared for subtrees that
   must not (or need not) be walked further.  Always returns
   NULL_TREE so the walk is never cut short.  */

static tree
move_stmt_r (tree *tp, int *walk_subtrees, void *data)
{
  struct move_stmt_d *p = (struct move_stmt_d *) data;
  tree t = *tp;

  if (p->block
      && (EXPR_P (t) || GIMPLE_STMT_P (t)))
    TREE_BLOCK (t) = p->block;

  if (OMP_DIRECTIVE_P (t)
      && TREE_CODE (t) != OMP_RETURN
      && TREE_CODE (t) != OMP_CONTINUE)
    {
      /* Do not remap variables inside OMP directives.  Variables
	 referenced in clauses and directive header belong to the
	 parent function and should not be moved into the child
	 function.  */
      bool save_remap_decls_p = p->remap_decls_p;
      p->remap_decls_p = false;
      *walk_subtrees = 0;

      /* Still walk the directive body, with decl remapping disabled.  */
      walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);

      p->remap_decls_p = save_remap_decls_p;
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels duplicated for the new function's EH regions are
	     looked up in NEW_LABEL_MAP; any label then has its context
	     moved to the destination function.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);

	  if (SSA_VAR_P (t)
	      && gimple_in_ssa_p (cfun))
	    {
	      /* Register the (possibly replaced) variable in the
		 destination function.  */
	      push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
	      add_referenced_var (*tp);
	      pop_cfun ();
	    }
	}
      /* Decls and SSA names have no subtrees that need walking.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    /* Do not walk into types.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
5751
/* Marks the virtual operands of all statements in basic block BB
   (including its PHI nodes) for renaming.  */

void
mark_virtual_ops_in_bb (basic_block bb)
{
  tree phi;
  block_stmt_iterator bsi;

  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    mark_virtual_ops_for_renaming (phi);

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    mark_virtual_ops_for_renaming (bsi_stmt (bsi));
}
5767
5768 /* Marks virtual operands of all statements in basic blocks BBS for
5769 renaming. */
5770
5771 static void
5772 mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
5773 {
5774 basic_block bb;
5775 unsigned i;
5776
5777 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
5778 mark_virtual_ops_in_bb (bb);
5779 }
5780
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  NEW_LABEL_MAP maps labels duplicated for
   DEST_FN's EH regions to their copies, and EH_OFFSET is the amount
   by which the EH region numbers of the moved statements must be
   shifted in DEST_FN.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct pointer_map_t *vars_map, htab_t new_label_map,
		  int eh_offset)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  block_stmt_iterator si;
  struct move_stmt_d d;
  unsigned old_len, new_len;
  tree phi, next_phi;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);
  if (current_loops)
    remove_bb_from_loops (bb);

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its
     original index, so the array must cover it.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow with some headroom (1.25x) to avoid repeated resizing.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
			     new_len);
    }

  VEC_replace (basic_block, cfg->x_basic_block_info,
               bb->index, bb);

  /* Remap the variables in phi nodes.  */
  for (phi = phi_nodes (bb); phi; phi = next_phi)
    {
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;

      next_phi = PHI_CHAIN (phi);
      if (!is_gimple_reg (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
          remove_phi_node (phi, NULL, true);
	  continue;
	}

      /* Replace the result and every SSA name argument with their
	 equivalents in DEST_CFUN.  */
      SET_PHI_RESULT (phi, replace_ssa_name (op, vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, vars_map, dest_cfun->decl));
	}
    }

  /* The statements in BB need to be associated with a new TREE_BLOCK.
     Labels need to be associated with a new label-to-block map.  */
  memset (&d, 0, sizeof (d));
  d.vars_map = vars_map;
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.new_label_map = new_label_map;

  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
    {
      tree stmt = bsi_stmt (si);
      int region;

      d.remap_decls_p = true;
      if (TREE_BLOCK (stmt))
	d.block = DECL_INITIAL (dest_cfun->decl);

      /* Remap decls, SSA names and labels in STMT.  */
      walk_tree (&stmt, move_stmt_r, &d, NULL);

      if (TREE_CODE (stmt) == LABEL_EXPR)
	{
	  /* Move the label from CFUN's label-to-block map to
	     DEST_CFUN's, keeping its uid.  */
	  tree label = LABEL_EXPR_LABEL (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2;
	      VEC_safe_grow_cleared (basic_block, gc,
				     cfg->x_label_to_block_map, new_len);
	    }

	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}
      else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
	/* Shift the EH region number of the RESX to account for the
	   regions duplicated into DEST_CFUN.  */
	TREE_OPERAND (stmt, 0) =
	  build_int_cst (NULL_TREE,
			 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
			 + eh_offset);

      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	{
	  /* Transfer the statement's EH region and value histograms
	     from CFUN to DEST_CFUN.  */
	  add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
	  remove_stmt_from_eh_region (stmt);
	  gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
	  gimple_remove_stmt_histograms (cfun, stmt);
	}

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }
}
5932
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   Returns REGION unchanged if no statement in BB belongs to an EH
   region.  */

static int
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, int region)
{
  block_stmt_iterator si;

  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
    {
      tree stmt = bsi_stmt (si);
      int stmt_region;

      /* For a RESX the region is encoded in its operand; everything
	 else is looked up in SRC_CFUN's EH tables.  */
      if (TREE_CODE (stmt) == RESX_EXPR)
	stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
      else
	stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
      if (stmt_region > 0)
	{
	  if (region < 0)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen to the outermost region enclosing both.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != -1);
	    }
	}
    }

  return region;
}
5965
/* Callback for duplicate_eh_regions.  Creates an artificial label in
   the current function equivalent to the label DECL, records the
   mapping in the hash table DATA, and returns the new label.  Aborts
   if DECL was already mapped.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = xmalloc (sizeof (struct tree_map));
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label ();
  /* The copy keeps the original label uid; make sure the current
     function's uid counter stays ahead of it.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
5990
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb)
{
  VEC(basic_block,heap) *bbs, *dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  struct pointer_map_t *vars_map;
  struct loop *loop = entry_bb->loop_father;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
              && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     VEC_address (basic_block, bbs),
				     VEC_length (basic_block, bbs));

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      /* Save the edge's probability, flags and source, then remove it.  */
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      /* Find the outermost EH region used inside the moved blocks, so
	 only the regions actually needed are duplicated.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  pop_cfun ();

  /* The ssa form for virtual operands in the source function will have to
     be repaired.  We do not care for the real operands -- the sese region
     must be closed with respect to those.  */
  mark_virtual_ops_in_region (bbs);

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_map = pointer_map_create ();
  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
			new_label_map, eh_offset);
      after = bb;
    }

  if (new_label_map)
    htab_delete (new_label_map);
  pointer_map_destroy (vars_map);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb,  EXIT_BLOCK_PTR, 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  /* Re-attach the saved entry and exit edges to the replacement block,
     restoring their flags and probabilities.  */
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  /* The replacement block inherits ENTRY_BB's immediate dominator and
     dominates everything the region used to dominate.  */
  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  VEC_free (basic_block, heap, dom_bbs);

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
6181
6182
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).
   Depending on FN's state this emits either a CFG-based dump (one
   basic block at a time) or a dump of the saved tree body.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;

  /* Print the signature: name followed by the argument list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fn, 2);

  dsf = DECL_STRUCT_FUNCTION (fn);
  if (dsf && (flags & TDF_DETAILS))
    dump_eh_tree (file, dsf);

  if (flags & TDF_RAW)
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  push_cfun (DECL_STRUCT_FUNCTION (fn));

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->local_decls)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  if (flags & TDF_VERBOSE)
	    print_node (file, "", var, 4);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* Make a CFG based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	dump_generic_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The outermost BIND_EXPR's variables were already
		 printed above; dump only its body.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  pop_cfun ();
}
6299
6300
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file, intended for use
   from a debugger.  */

void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
6308
6309
6310 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6311
6312 static void
6313 print_pred_bbs (FILE *file, basic_block bb)
6314 {
6315 edge e;
6316 edge_iterator ei;
6317
6318 FOR_EACH_EDGE (e, ei, bb->preds)
6319 fprintf (file, "bb_%d ", e->src->index);
6320 }
6321
6322
6323 /* Print on FILE the indexes for the successors of basic_block BB. */
6324
6325 static void
6326 print_succ_bbs (FILE *file, basic_block bb)
6327 {
6328 edge e;
6329 edge_iterator ei;
6330
6331 FOR_EACH_EDGE (e, ei, bb->succs)
6332 fprintf (file, "bb_%d ", e->dest->index);
6333 }
6334
/* Print to FILE the basic block BB following the VERBOSITY level.
   Indented INDENT spaces.  At VERBOSITY >= 2 the block's header
   (index plus predecessor and successor lists) is printed; at
   VERBOSITY >= 3 the block's statements are printed as well.  */

void
print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
{
  /* Build the indentation string on the stack.  */
  char *s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print basic_block's header.  */
  if (verbosity >= 2)
    {
      fprintf (file, "%s  bb_%d (preds = {", s_indent, bb->index);
      print_pred_bbs (file, bb);
      fprintf (file, "}, succs = {");
      print_succ_bbs (file, bb);
      fprintf (file, "})\n");
    }

  /* Print basic_block's body.  */
  if (verbosity >= 3)
    {
      fprintf (file, "%s  {\n", s_indent);
      tree_dump_bb (bb, file, indent + 4);
      fprintf (file, "%s  }\n", s_indent);
    }
}
6362
6363 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6364
/* Pretty print LOOP on FILE, indented INDENT spaces.  Following
   VERBOSITY level this outputs the contents of the loop, or just its
   structure.  A null LOOP is silently ignored.  */

static void
print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
{
  char *s_indent;
  basic_block bb;

  if (loop == NULL)
    return;

  s_indent = (char *) alloca ((size_t) indent + 1);
  memset ((void *) s_indent, ' ', (size_t) indent);
  s_indent[indent] = '\0';

  /* Print loop's header: number, header/latch block indices and the
     known iteration-count information.  */
  fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
	   loop->num, loop->header->index, loop->latch->index);
  fprintf (file, ", niter = ");
  print_generic_expr (file, loop->nb_iterations, 0);

  if (loop->any_upper_bound)
    {
      fprintf (file, ", upper_bound = ");
      dump_double_int (file, loop->nb_iterations_upper_bound, true);
    }

  if (loop->any_estimate)
    {
      fprintf (file, ", estimate = ");
      dump_double_int (file, loop->nb_iterations_estimate, true);
    }
  fprintf (file, ")\n");

  /* Print loop's body: the blocks belonging directly to LOOP, then
     its nested loops recursively.  */
  if (verbosity >= 1)
    {
      fprintf (file, "%s{\n", s_indent);
      FOR_EACH_BB (bb)
	if (bb->loop_father == loop)
	  print_loops_bb (file, bb, indent, verbosity);

      print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
      fprintf (file, "%s}\n", s_indent);
    }
}
6413
6414 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6415 spaces. Following VERBOSITY level this outputs the contents of the
6416 loop, or just its structure. */
6417
6418 static void
6419 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6420 {
6421 if (loop == NULL)
6422 return;
6423
6424 print_loop (file, loop, indent, verbosity);
6425 print_loop_and_siblings (file, loop->next, indent, verbosity);
6426 }
6427
/* Follow a CFG edge from the entry point of the program, and on entry
   of a loop, pretty print the loop structure on FILE at the given
   VERBOSITY level.  Does nothing if the first real basic block does
   not exist or has no loop information.  */

void
print_loops (FILE *file, int verbosity)
{
  basic_block bb;

  /* NUM_FIXED_BLOCKS skips the artificial entry/exit blocks, giving
     the first real block of the function.  */
  bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
  if (bb && bb->loop_father)
    print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
}
6440
6441
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Prints to stderr; intended for use from a debugger.  */

void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
6449
/* Print on stderr the code of LOOP, at some VERBOSITY level.
   Intended for use from a debugger.  */

void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
6457
/* Print on stderr the code of loop number NUM, at some VERBOSITY
   level.  Intended for use from a debugger.  */

void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (num), verbosity);
}
6466
/* Return true if BB ends with a call, possibly followed by some
   instructions that must stay with the call.  Return false,
   otherwise.  */

static bool
tree_block_ends_with_call_p (basic_block bb)
{
  /* A statement "ends with a call" if a CALL_EXPR can be extracted
     from the last statement of the block.  */
  block_stmt_iterator bsi = bsi_last (bb);
  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
}
6477
6478
/* Return true if BB ends with a conditional branch (i.e. its last
   statement is a COND_EXPR).  Return false otherwise.  */

static bool
tree_block_ends_with_condjump_p (const_basic_block bb)
{
  /* This CONST_CAST is okay because last_stmt doesn't modify its
     argument and the return value is not modified.  */
  const_tree stmt = last_stmt (CONST_CAST_BB(bb));
  return (stmt && TREE_CODE (stmt) == COND_EXPR);
}
6490
6491
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for tree_flow_call_edges_add.  A fake edge is
   needed for calls that might not return normally and for volatile
   or input-only inline assembly.  */

static bool
need_fake_edge_p (tree t)
{
  tree call, fndecl = NULL_TREE;
  int call_flags;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  call = get_call_expr_in (t);
  if (call)
    {
      fndecl = get_callee_fndecl (call);
      call_flags = call_expr_flags (call);
    }

  /* Known built-ins that cannot throw, always return, and return
     only once never need a fake edge.  */
  if (call && fndecl && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_NORETURN)
      && !(call_flags & ECF_RETURNS_TWICE))
   return false;

  if (call && !(call_flags & ECF_NORETURN))
    return true;

  if (TREE_CODE (t) == ASM_EXPR
       && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
    return true;

  return false;
}
6530
6531
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
tree_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do if the function only has the fixed entry/exit
     blocks.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      block_stmt_iterator bsi = bsi_last (bb);
      tree t = NULL_TREE;
      if (!bsi_end_p (bsi))
	t = bsi_stmt (bsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting an empty statement on the fallthru edge
		 splits the block, separating the call from EXIT.  */
	      bsi_insert_on_edge (e, build_empty_stmt ());
	      bsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      block_stmt_iterator bsi;
      tree stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Walk the block's statements backwards looking for ones that
	 need a fake edge.  */
      bsi = bsi_last (bb);
      if (!bsi_end_p (bsi))
	{
	  last_stmt = bsi_stmt (bsi);
	  do
	    {
	      stmt = bsi_stmt (bsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;
		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      bsi_prev (&bsi);
	    }
	  while (!bsi_end_p (bsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
6648
/* Purge dead abnormal call edges from basic block BB.  Also purges
   dead EH edges.  Returns true if any edge was removed.  Frees the
   dominance information when edges are removed (see the comment in
   tree_purge_dead_eh_edges).  */

bool
tree_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = tree_purge_dead_eh_edges (bb);

  /* Abnormal edges can only exist because of nonlocal labels.  */
  if (cfun->has_nonlocal_label)
    {
      tree stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If BB's last statement can no longer make an abnormal goto,
	 all abnormal successor edges are dead.  */
      if (!(stmt && tree_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See tree_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
6681
6682 /* Stores all basic blocks dominated by BB to DOM_BBS. */
6683
6684 static void
6685 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6686 {
6687 basic_block son;
6688
6689 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6690 for (son = first_dom_son (CDI_DOMINATORS, bb);
6691 son;
6692 son = next_dom_son (CDI_DOMINATORS, son))
6693 get_all_dominated_blocks (son, dom_bbs);
6694 }
6695
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  /* Blocks that become unreachable once E is gone and must be deleted.  */
  VEC (basic_block, heap) *bbs_to_remove = NULL;
  /* Blocks whose immediate dominator may change after the removal.  */
  VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
  /* DF: successors of the removed blocks that survive (the dominance
     frontier of the removed region).  DF_IDOM: immediate dominators of
     the blocks in DF.  */
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator information we cannot tell which blocks become
     unreachable; just remove the edge itself.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  /* E->dest stays reachable through F, so nothing is removed
	     except E itself.  */
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      get_all_dominated_blocks (e->dest, &bbs_to_remove);
      /* Collect every surviving successor of a removed block.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* Blocks that are themselves being removed do not belong to the
	 dominance frontier.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* E->dest dominates itself, so it is in BBS_TO_REMOVE; deleting
	 it takes E away as well.  */
      for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
	delete_basic_block (bb);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  VEC_free (basic_block, heap, bbs_to_remove);
  VEC_free (basic_block, heap, bbs_to_fix_dom);
}
6817
6818 /* Purge dead EH edges from basic block BB. */
6819
6820 bool
6821 tree_purge_dead_eh_edges (basic_block bb)
6822 {
6823 bool changed = false;
6824 edge e;
6825 edge_iterator ei;
6826 tree stmt = last_stmt (bb);
6827
6828 if (stmt && tree_can_throw_internal (stmt))
6829 return false;
6830
6831 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6832 {
6833 if (e->flags & EDGE_EH)
6834 {
6835 remove_edge_and_dominated_blocks (e);
6836 changed = true;
6837 }
6838 else
6839 ei_next (&ei);
6840 }
6841
6842 return changed;
6843 }
6844
6845 bool
6846 tree_purge_all_dead_eh_edges (const_bitmap blocks)
6847 {
6848 bool changed = false;
6849 unsigned i;
6850 bitmap_iterator bi;
6851
6852 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6853 {
6854 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
6855 }
6856
6857 return changed;
6858 }
6859
6860 /* This function is called whenever a new edge is created or
6861 redirected. */
6862
6863 static void
6864 tree_execute_on_growing_pred (edge e)
6865 {
6866 basic_block bb = e->dest;
6867
6868 if (phi_nodes (bb))
6869 reserve_phi_args_for_new_edge (bb);
6870 }
6871
6872 /* This function is called immediately before edge E is removed from
6873 the edge vector E->dest->preds. */
6874
6875 static void
6876 tree_execute_on_shrinking_pred (edge e)
6877 {
6878 if (phi_nodes (e->dest))
6879 remove_phi_args (e);
6880 }
6881
6882 /*---------------------------------------------------------------------------
6883 Helper functions for Loop versioning
6884 ---------------------------------------------------------------------------*/
6885
6886 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6887 of 'first'. Both of them are dominated by 'new_head' basic block. When
6888 'new_head' was created by 'second's incoming edge it received phi arguments
6889 on the edge by split_edge(). Later, additional edge 'e' was created to
6890 connect 'new_head' and 'first'. Now this routine adds phi args on this
6891 additional edge 'e' that new_head to second edge received as part of edge
6892 splitting.
6893 */
6894
6895 static void
6896 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6897 basic_block new_head, edge e)
6898 {
6899 tree phi1, phi2;
6900 edge e2 = find_edge (new_head, second);
6901
6902 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6903 edge, we should always have an edge from NEW_HEAD to SECOND. */
6904 gcc_assert (e2 != NULL);
6905
6906 /* Browse all 'second' basic block phi nodes and add phi args to
6907 edge 'e' for 'first' head. PHI args are always in correct order. */
6908
6909 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
6910 phi2 && phi1;
6911 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
6912 {
6913 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
6914 add_phi_arg (phi1, def, e);
6915 }
6916 }
6917
6918 /* Adds a if else statement to COND_BB with condition COND_EXPR.
6919 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
6920 the destination of the ELSE part. */
6921 static void
6922 tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6923 basic_block second_head ATTRIBUTE_UNUSED,
6924 basic_block cond_bb, void *cond_e)
6925 {
6926 block_stmt_iterator bsi;
6927 tree new_cond_expr = NULL_TREE;
6928 tree cond_expr = (tree) cond_e;
6929 edge e0;
6930
6931 /* Build new conditional expr */
6932 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
6933 NULL_TREE, NULL_TREE);
6934
6935 /* Add new cond in cond_bb. */
6936 bsi = bsi_start (cond_bb);
6937 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
6938 /* Adjust edges appropriately to connect new head with first head
6939 as well as second head. */
6940 e0 = single_succ_edge (cond_bb);
6941 e0->flags &= ~EDGE_FALLTHRU;
6942 e0->flags |= EDGE_FALSE_VALUE;
6943 }
6944
/* The CFG hooks vector used while the IL is in GIMPLE/tree form; see
   cfghooks.h for the contract of each callback.  */

struct cfg_hooks tree_cfg_hooks = {
  "tree",			/* name */
  tree_verify_flow_info,	/* verify_flow_info */
  tree_dump_bb,			/* dump_bb */
  create_bb,			/* create_basic_block */
  tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
  tree_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  tree_split_block,		/* split_block */
  tree_move_block_after,	/* move_block_after */
  tree_can_merge_blocks_p,	/* can_merge_blocks_p */
  tree_merge_blocks,		/* merge_blocks */
  tree_predict_edge,		/* predict_edge */
  tree_predicted_by_p,		/* predicted_by_p */
  tree_can_duplicate_bb_p,	/* can_duplicate_block_p */
  tree_duplicate_bb,		/* duplicate_block */
  tree_split_edge,		/* split_edge */
  tree_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  tree_block_ends_with_call_p,	/* block_ends_with_call_p */
  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  tree_flow_call_edges_add,	/* flow_call_edges_add */
  tree_execute_on_growing_pred,	/* execute_on_growing_pred */
  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  tree_lv_add_condition_to_bb,	/* lv_add_condition_to_bb */
  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
6976
6977
6978 /* Split all critical edges. */
6979
6980 static unsigned int
6981 split_critical_edges (void)
6982 {
6983 basic_block bb;
6984 edge e;
6985 edge_iterator ei;
6986
6987 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6988 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
6989 mappings around the calls to split_edge. */
6990 start_recording_case_labels ();
6991 FOR_ALL_BB (bb)
6992 {
6993 FOR_EACH_EDGE (e, ei, bb->succs)
6994 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6995 {
6996 split_edge (e);
6997 }
6998 }
6999 end_recording_case_labels ();
7000 return 0;
7001 }
7002
/* Pass that splits all critical edges, establishing the
   PROP_no_crit_edges property for passes that cannot handle them.  */

struct gimple_opt_pass pass_split_crit_edges =
{
 {
  GIMPLE_PASS,
  "crited",				/* name */
  NULL,					/* gate */
  split_critical_edges,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SPLIT_EDGES,			/* tv_id */
  PROP_cfg,				/* properties_required */
  PROP_no_crit_edges,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
7021
7022 \f
7023 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
7024 a temporary, make sure and register it to be renamed if necessary,
7025 and finally return the temporary. Put the statements to compute
7026 EXP before the current statement in BSI. */
7027
7028 tree
7029 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
7030 {
7031 tree t, new_stmt, orig_stmt;
7032
7033 if (is_gimple_val (exp))
7034 return exp;
7035
7036 t = make_rename_temp (type, NULL);
7037 new_stmt = build_gimple_modify_stmt (t, exp);
7038
7039 orig_stmt = bsi_stmt (*bsi);
7040 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
7041 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
7042
7043 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
7044 if (gimple_in_ssa_p (cfun))
7045 mark_symbols_for_renaming (new_stmt);
7046
7047 return t;
7048 }
7049
7050 /* Build a ternary operation and gimplify it. Emit code before BSI.
7051 Return the gimple_val holding the result. */
7052
7053 tree
7054 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
7055 tree type, tree a, tree b, tree c)
7056 {
7057 tree ret;
7058
7059 ret = fold_build3 (code, type, a, b, c);
7060 STRIP_NOPS (ret);
7061
7062 return gimplify_val (bsi, type, ret);
7063 }
7064
7065 /* Build a binary operation and gimplify it. Emit code before BSI.
7066 Return the gimple_val holding the result. */
7067
7068 tree
7069 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
7070 tree type, tree a, tree b)
7071 {
7072 tree ret;
7073
7074 ret = fold_build2 (code, type, a, b);
7075 STRIP_NOPS (ret);
7076
7077 return gimplify_val (bsi, type, ret);
7078 }
7079
7080 /* Build a unary operation and gimplify it. Emit code before BSI.
7081 Return the gimple_val holding the result. */
7082
7083 tree
7084 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
7085 tree a)
7086 {
7087 tree ret;
7088
7089 ret = fold_build1 (code, type, a);
7090 STRIP_NOPS (ret);
7091
7092 return gimplify_val (bsi, type, ret);
7093 }
7094
7095
7096 \f
7097 /* Emit return warnings. */
7098
7099 static unsigned int
7100 execute_warn_function_return (void)
7101 {
7102 source_location location;
7103 tree last;
7104 edge e;
7105 edge_iterator ei;
7106
7107 /* If we have a path to EXIT, then we do return. */
7108 if (TREE_THIS_VOLATILE (cfun->decl)
7109 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7110 {
7111 location = UNKNOWN_LOCATION;
7112 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7113 {
7114 last = last_stmt (e->src);
7115 if (TREE_CODE (last) == RETURN_EXPR
7116 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
7117 break;
7118 }
7119 if (location == UNKNOWN_LOCATION)
7120 location = cfun->function_end_locus;
7121 warning (0, "%H%<noreturn%> function does return", &location);
7122 }
7123
7124 /* If we see "return;" in some basic block, then we do reach the end
7125 without returning a value. */
7126 else if (warn_return_type
7127 && !TREE_NO_WARNING (cfun->decl)
7128 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7129 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7130 {
7131 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7132 {
7133 tree last = last_stmt (e->src);
7134 if (TREE_CODE (last) == RETURN_EXPR
7135 && TREE_OPERAND (last, 0) == NULL
7136 && !TREE_NO_WARNING (last))
7137 {
7138 location = EXPR_LOCATION (last);
7139 if (location == UNKNOWN_LOCATION)
7140 location = cfun->function_end_locus;
7141 warning (OPT_Wreturn_type, "%Hcontrol reaches end of non-void function", &location);
7142 TREE_NO_WARNING (cfun->decl) = 1;
7143 break;
7144 }
7145 }
7146 }
7147 return 0;
7148 }
7149
7150
7151 /* Given a basic block B which ends with a conditional and has
7152 precisely two successors, determine which of the edges is taken if
7153 the conditional is true and which is taken if the conditional is
7154 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7155
7156 void
7157 extract_true_false_edges_from_block (basic_block b,
7158 edge *true_edge,
7159 edge *false_edge)
7160 {
7161 edge e = EDGE_SUCC (b, 0);
7162
7163 if (e->flags & EDGE_TRUE_VALUE)
7164 {
7165 *true_edge = e;
7166 *false_edge = EDGE_SUCC (b, 1);
7167 }
7168 else
7169 {
7170 *false_edge = e;
7171 *true_edge = EDGE_SUCC (b, 1);
7172 }
7173 }
7174
/* Pass that runs execute_warn_function_return over the finished CFG.  */

struct gimple_opt_pass pass_warn_function_return =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
7193
7194 /* Emit noreturn warnings. */
7195
7196 static unsigned int
7197 execute_warn_function_noreturn (void)
7198 {
7199 if (warn_missing_noreturn
7200 && !TREE_THIS_VOLATILE (cfun->decl)
7201 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7202 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7203 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7204 "for attribute %<noreturn%>",
7205 cfun->decl);
7206 return 0;
7207 }
7208
/* Pass that runs execute_warn_function_noreturn over the finished CFG.  */

struct gimple_opt_pass pass_warn_function_noreturn =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};