/* tree-cfg.c — Control flow graph construction for GCC trees.  */
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "tree-ssa-propagate.h"
48 #include "value-prof.h"
49 #include "pointer-set.h"
50
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of SWITCH_EXPRs.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Number of merged labels.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.
   Set by make_blocks and consumed by build_tree_cfg to decide whether
   to run factor_computed_gotos.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static basic_block create_bb (void *, void *, basic_block);
static void make_blocks (tree);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_switch_expr_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge tree_redirect_edge_and_branch (edge, basic_block);
static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (tree, tree);
static int tree_verify_flow_info (void);
static void tree_make_forwarder_block (edge);
static void tree_cfg2vcg (FILE *);
static inline void change_bb_for_stmt (tree t, basic_block bb);

/* Flowgraph optimization and cleanup.  */
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (tree, tree);
114
/* Build an empty CFG for the current function: allocate the basic block
   and label-to-block arrays, install the fixed ENTRY and EXIT blocks in
   the array, and chain ENTRY directly to EXIT.  */

void
init_empty_tree_cfg (void)
{
  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = NUM_FIXED_BLOCKS;
  last_basic_block = NUM_FIXED_BLOCKS;
  basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc, basic_block_info,
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
                         initial_cfg_capacity);

  /* The fixed blocks occupy the first two slots of the block array
     and form the initial (empty) block chain.  */
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
}
137
138 /*---------------------------------------------------------------------------
139 Create basic blocks
140 ---------------------------------------------------------------------------*/
141
/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  Creates the blocks and
   edges, factors computed gotos, cleans up labels, and emits optional
   debugging dumps.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* found_computed_goto is set as a side effect of make_blocks.  */
  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	tree_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
206
/* Pass execute hook: build the CFG for the body of the current
   function.  Always returns 0 (no additional TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
  return 0;
}
213
/* Pass descriptor for CFG construction.  Requires lowered GIMPLE with
   EH (PROP_gimple_leh) and provides PROP_cfg.  */

struct tree_opt_pass pass_build_cfg =
{
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg,	/* todo_flags_finish */
  0					/* letter */
};
230
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Temporary variable holding the destination of the factored goto.  */
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      if (bsi_end_p (bsi))
	continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  tree assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (! factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      block_stmt_iterator new_bsi = bsi_start (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label ();
	      factored_computed_goto_label
		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
				BSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
	      bsi_insert_after (&new_bsi, factored_computed_goto,
				BSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = build2_gimple (GIMPLE_MODIFY_STMT,
				      var, GOTO_DESTINATION (last));
	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  GOTO_DESTINATION (last) = factored_label_decl;
	}
    }
}
306
307
/* Build a flowgraph for the statement_list STMT_LIST: walk the list,
   splitting it at basic block boundaries and assigning each statement
   to its block.  Sets the file-static FOUND_COMPUTED_GOTO as a side
   effect when a computed goto is seen.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_list)
	    stmt_list = tsi_split_statement_list_before (&i);
	  bb = create_basic_block (stmt_list, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      set_bb_for_stmt (stmt, bb);

      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
353
354
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-null, is the statement list for the new block; E must be null
   (edge hints are not supported by the tree CFG hooks).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->stmt_list = h ? (tree) h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed: expand by about 25% when
     the array is exactly full.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
391
392
393 /*---------------------------------------------------------------------------
394 Edge creation
395 ---------------------------------------------------------------------------*/
396
397 /* Fold COND_EXPR_COND of each COND_EXPR. */
398
399 void
400 fold_cond_expr_cond (void)
401 {
402 basic_block bb;
403
404 FOR_EACH_BB (bb)
405 {
406 tree stmt = last_stmt (bb);
407
408 if (stmt
409 && TREE_CODE (stmt) == COND_EXPR)
410 {
411 tree cond = fold (COND_EXPR_COND (stmt));
412 if (integer_zerop (cond))
413 COND_EXPR_COND (stmt) = boolean_false_node;
414 else if (integer_onep (cond))
415 COND_EXPR_COND (stmt) = boolean_true_node;
416 }
417 }
418 }
419
/* Join all the blocks in the flowgraph: walk every basic block, inspect
   its last statement and create the outgoing edges it implies, tracking
   OpenMP region nesting along the way.  Finally folds COND_EXPR
   conditions.  */

static void
make_edges (void)
{
  basic_block bb;
  /* Innermost OpenMP region currently open; NULL outside any region.  */
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum tree_code code = TREE_CODE (last);
	  switch (code)
	    {
	    case GOTO_EXPR:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RETURN_EXPR:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case COND_EXPR:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case SWITCH_EXPR:
	      make_switch_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RESX_EXPR:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case CALL_EXPR:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (tree_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(call_expr_flags (last) & ECF_NORETURN);
	      break;

	    case MODIFY_EXPR:
	      /* MODIFY_EXPR should have been lowered to
		 GIMPLE_MODIFY_STMT before CFG construction.  */
	      gcc_unreachable ();

	    case GIMPLE_MODIFY_STMT:
	      if (is_ctrl_altering_stmt (last))
		{
		  /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
		     the CALL_EXPR may have an abnormal edge.  Search the RHS
		     for this case and create any required edges.  */
		  if (tree_can_make_abnormal_goto (last))
		    make_abnormal_goto_edges (bb, true);

		  make_eh_edges (last);
		}
	      fallthru = true;
	      break;

	    case OMP_PARALLEL:
	    case OMP_FOR:
	    case OMP_SINGLE:
	    case OMP_MASTER:
	    case OMP_ORDERED:
	    case OMP_CRITICAL:
	    case OMP_SECTION:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = false;
	      break;

	    case OMP_RETURN:
	      /* In the case of an OMP_SECTION, the edge will go somewhere
		 other than the next block.  This will be created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != OMP_SECTION;
	      cur_region = cur_region->outer;
	      break;

	    case OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case OMP_FOR:
		  /* ??? Technically there should be a some sort of loopback
		     edge here, but it goes to a block that doesn't exist yet,
		     and without it, updating the ssa form would be a real
		     bear.  Fortunately, we don't yet do ssa before expanding
		     these nodes.  */
		  break;

		case OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  /* ??? Similarly wrt loopback.  */
		  {
		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == OMP_SECTION);
			make_edge (cur_region->entry, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      fallthru = true;
	      break;

	    default:
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}
570
571
/* Create the edges for a COND_EXPR starting at block BB.
   At this point, both clauses must contain only simple gotos.  */

static void
make_cond_expr_edges (basic_block bb)
{
  tree entry = last_stmt (bb);
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (TREE_CODE (entry) == COND_EXPR);

  /* Entry basic blocks for each component.  */
  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
#ifdef USE_MAPPED_LOCATION
  e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
#else
  e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
#endif
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* NOTE(review): only the else edge is NULL-checked here — presumably
     make_edge can return NULL when the edge already exists (e.g. both
     arms target the same block); confirm against make_edge's contract.  */
  if (e)
    {
#ifdef USE_MAPPED_LOCATION
      e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
#else
      e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
#endif
    }
}
608
609
610 /* Called for each element in the hash table (P) as we delete the
611 edge to cases hash table.
612
613 Clear all the TREE_CHAINs to prevent problems with copying of
614 SWITCH_EXPRs and structure sharing rules, then free the hash table
615 element. */
616
617 static bool
618 edge_to_cases_cleanup (void *key ATTRIBUTE_UNUSED, void **value,
619 void *data ATTRIBUTE_UNUSED)
620 {
621 tree t, next;
622
623 for (t = (tree) *value; t; t = next)
624 {
625 next = TREE_CHAIN (t);
626 TREE_CHAIN (t) = NULL;
627 }
628
629 *value = NULL;
630 return false;
631 }
632
633 /* Start recording information mapping edges to case labels. */
634
635 void
636 start_recording_case_labels (void)
637 {
638 gcc_assert (edge_to_cases == NULL);
639 edge_to_cases = pointer_map_create ();
640 }
641
642 /* Return nonzero if we are recording information for case labels. */
643
644 static bool
645 recording_case_labels_p (void)
646 {
647 return (edge_to_cases != NULL);
648 }
649
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  The map must be traversed
   (to clear the TREE_CHAIN links on the recorded cases) before it is
   destroyed.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}
659
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, tree t)
{
  void **slot;
  size_t i, n;
  tree vec;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  vec = SWITCH_LABELS (t);
  n = TREE_VEC_LENGTH (vec);
  for (i = 0; i < n; i++)
    {
      tree elt = TREE_VEC_ELT (vec, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  The case is pushed onto the front of the slot's
	 existing chain via its TREE_CHAIN field.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  /* E is now guaranteed to be in the map since every case of T was
     just inserted.  */
  return (tree) *pointer_map_contains (edge_to_cases, e);
}
703
704 /* Create the edges for a SWITCH_EXPR starting at block BB.
705 At this point, the switch body has been lowered and the
706 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
707
708 static void
709 make_switch_expr_edges (basic_block bb)
710 {
711 tree entry = last_stmt (bb);
712 size_t i, n;
713 tree vec;
714
715 vec = SWITCH_LABELS (entry);
716 n = TREE_VEC_LENGTH (vec);
717
718 for (i = 0; i < n; ++i)
719 {
720 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
721 basic_block label_bb = label_to_block (lab);
722 make_edge (bb, label_bb, 0);
723 }
724 }
725
726
/* Return the basic block holding label DEST in function IFUN, or NULL
   if DEST has no assigned block.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi =
	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      /* Inserting the LABEL_EXPR assigns DEST a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* Labels added after the map was last grown have no entry yet.  */
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
752
753 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
754 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
755
756 void
757 make_abnormal_goto_edges (basic_block bb, bool for_call)
758 {
759 basic_block target_bb;
760 block_stmt_iterator bsi;
761
762 FOR_EACH_BB (target_bb)
763 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
764 {
765 tree target = bsi_stmt (bsi);
766
767 if (TREE_CODE (target) != LABEL_EXPR)
768 break;
769
770 target = LABEL_EXPR_LABEL (target);
771
772 /* Make an edge to every label block that has been marked as a
773 potential target for a computed goto or a non-local goto. */
774 if ((FORCED_LABEL (target) && !for_call)
775 || (DECL_NONLOCAL (target) && for_call))
776 {
777 make_edge (bb, target_bb, EDGE_ABNORMAL);
778 break;
779 }
780 }
781 }
782
/* Create edges for a goto statement at block BB.  Simple gotos become
   a fallthru edge and the GOTO_EXPR itself is deleted (the edge now
   represents the jump); computed gotos get abnormal edges to every
   forced label.  */

static void
make_goto_expr_edges (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  tree goto_t = bsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = GOTO_DESTINATION (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
      e->goto_locus = EXPR_LOCATION (goto_t);
#else
      e->goto_locus = EXPR_LOCUS (goto_t);
#endif
      /* The goto statement is now redundant with the edge.  */
      bsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
808
809
810 /*---------------------------------------------------------------------------
811 Flowgraph analysis
812 ---------------------------------------------------------------------------*/
813
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We only run this pass once, running it more than once is probably not
   profitable.  */

/* A map from basic block index to the leading label of that block.
   Allocated at the top of cleanup_dead_labels and freed at its end;
   shared with the update_eh_label and main_block_label helpers.  */
static tree *label_for_bb;
823
824 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
825 static void
826 update_eh_label (struct eh_region *region)
827 {
828 tree old_label = get_eh_region_tree_label (region);
829 if (old_label)
830 {
831 tree new_label;
832 basic_block bb = label_to_block (old_label);
833
834 /* ??? After optimizing, there may be EH regions with labels
835 that have already been removed from the function body, so
836 there is no basic block for them. */
837 if (! bb)
838 return;
839
840 new_label = label_for_bb[bb->index];
841 set_eh_region_tree_label (region, new_label);
842 }
843 }
844
845 /* Given LABEL return the first label in the same basic block. */
846 static tree
847 main_block_label (tree label)
848 {
849 basic_block bb = label_to_block (label);
850
851 /* label_to_block possibly inserted undefined label into the chain. */
852 if (!label_for_bb[bb->index])
853 label_for_bb[bb->index] = label;
854 return label_for_bb[bb->index];
855 }
856
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (tree, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree label, stmt = bsi_stmt (i);

	  /* Labels appear only at the start of a block.  */
	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (! label_for_bb[bb->index])
	    {
	      label_for_bb[bb->index] = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (! DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
	    {
	      label_for_bb[bb->index] = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    tree true_branch, false_branch;

	    true_branch = COND_EXPR_THEN (stmt);
	    false_branch = COND_EXPR_ELSE (stmt);

	    GOTO_DESTINATION (true_branch)
	      = main_block_label (GOTO_DESTINATION (true_branch));
	    GOTO_DESTINATION (false_branch)
	      = main_block_label (GOTO_DESTINATION (false_branch));

	    break;
	  }

	case SWITCH_EXPR:
	  {
	    size_t i;
	    tree vec = SWITCH_LABELS (stmt);
	    size_t n = TREE_VEC_LENGTH (vec);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree elt = TREE_VEC_ELT (vec, i);
		tree label = main_block_label (CASE_LABEL (elt));
		CASE_LABEL (elt) = label;
	      }
	    break;
	  }

	/* We have to handle GOTO_EXPRs until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GOTO_EXPR:
	  if (! computed_goto_p (stmt))
	    {
	      GOTO_DESTINATION (stmt)
		= main_block_label (GOTO_DESTINATION (stmt));
	      break;
	    }
	  /* FALLTHRU — a computed goto's destination is not a label
	     and needs no redirection.  */

	default:
	  break;
	}
    }

  /* EH region labels are redirected through the same map.  */
  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
	continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  if (label == label_for_this_bb
	      || ! DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    bsi_next (&i);
	  else
	    bsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
993
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree labels = SWITCH_LABELS (stmt);
	  int old_size = TREE_VEC_LENGTH (labels);
	  int i, j, new_size = old_size;
	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
	  tree default_label;

	  /* The default label is always the last case in a switch
	     statement after gimplification.  */
	  default_label = CASE_LABEL (default_case);

	  /* Look for possible opportunities to merge cases.
	     Ignore the last element of the label vector because it
	     must be the default case.  */
	  i = 0;
	  while (i < old_size - 1)
	    {
	      tree base_case, base_label, base_high;
	      base_case = TREE_VEC_ELT (labels, i);

	      gcc_assert (base_case);
	      base_label = CASE_LABEL (base_case);

	      /* Discard cases that have the same destination as the
		 default case.  Discarded slots are nulled out and
		 compressed away below.  */
	      if (base_label == default_label)
		{
		  TREE_VEC_ELT (labels, i) = NULL_TREE;
		  i++;
		  new_size--;
		  continue;
		}

	      /* The running upper bound of the merged range so far.  */
	      base_high = CASE_HIGH (base_case) ?
		CASE_HIGH (base_case) : CASE_LOW (base_case);
	      i++;
	      /* Try to merge case labels.  Break out when we reach the end
		 of the label vector or when we cannot merge the next case
		 label with the current one.  */
	      while (i < old_size - 1)
		{
		  tree merge_case = TREE_VEC_ELT (labels, i);
		  tree merge_label = CASE_LABEL (merge_case);
		  /* T is base_high + 1, the value the next case must
		     start at to be contiguous.  */
		  tree t = int_const_binop (PLUS_EXPR, base_high,
					    integer_one_node, 1);

		  /* Merge the cases if they jump to the same place,
		     and their ranges are consecutive.  */
		  if (merge_label == base_label
		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
		    {
		      base_high = CASE_HIGH (merge_case) ?
			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
		      CASE_HIGH (base_case) = base_high;
		      TREE_VEC_ELT (labels, i) = NULL_TREE;
		      new_size--;
		      i++;
		    }
		  else
		    break;
		}
	    }

	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      while (! TREE_VEC_ELT (labels, j))
		j++;
	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
	    }
	  TREE_VEC_LENGTH (labels) = new_size;
	}
    }
}
1083
/* Checks whether we can merge block B into block A: A must have B as
   its single normal successor, B must have A as its single predecessor,
   and nothing in either block (control statements, non-local or user
   labels, unremovable PHIs, loop latch status) may prevent the merge.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;
  tree phi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phi = phi_nodes (b);
  if (phi)
    {
      if (name_mappings_registered_p ())
	return false;

      for (; phi; phi = PHI_CHAIN (phi))
	if (!is_gimple_reg (PHI_RESULT (phi))
	    && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
	  return false;
    }

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}
1152
1153 /* Replaces all uses of NAME by VAL. */
1154
void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  tree stmt;
  edge e;

  /* Walk every statement that has an immediate use of NAME.  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Real statements (not PHI nodes) get their operand caches
         updated through the push/pop pair below.  */
      if (TREE_CODE (stmt) != PHI_NODE)
        push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (TREE_CODE (stmt) == PHI_NODE)
            {
              e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (TREE_CODE (stmt) != PHI_NODE)
        {
          tree rhs;

          /* Re-fold the statement now that an operand changed.  */
          fold_stmt_inplace (stmt);

          /* FIXME.  This should go in pop_stmt_changes.  */
          rhs = get_rhs (stmt);
          if (TREE_CODE (rhs) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr (rhs);

          maybe_clean_or_replace_eh_stmt (stmt, stmt);

          pop_stmt_changes (&stmt);
        }
    }

  /* All uses must have been replaced at this point.  */
  gcc_assert (zero_imm_uses_p (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}
1217
1218 /* Merge block B into block A. */
1219
static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;
  tree phi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  bsi = bsi_last (a);
  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
    {
      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
      tree copy;
      bool may_replace_uses = may_propagate_copy (def, use);

      /* In case we have loops to care about, do not propagate arguments of
         loop closed ssa phi nodes.  */
      if (current_loops
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = build2_gimple (GIMPLE_MODIFY_STMT, def, use);
          bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
          SSA_NAME_DEF_STMT (def) = copy;
          remove_phi_node (phi, NULL, false);
        }
      else
        {
          /* Propagate the single argument into every use of the
             result and discard the PHI (releasing its SSA name).  */
          replace_uses_by (def, use);
          remove_phi_node (phi, NULL, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
        {
          tree label = bsi_stmt (bsi);

          bsi_remove (&bsi, false);
          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
            {
              block_stmt_iterator dest_bsi = bsi_start (a);
              bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
            }
        }
      else
        {
          change_bb_for_stmt (bsi_stmt (bsi), a);
          bsi_next (&bsi);
        }
    }

  /* Merge the chains.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}
1305
1306
/* Return the one of two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use
1309 this in optimizations that use post-dominators for their heuristics,
1310 to catch the cases in C++ where function calls are involved. */
1311
1312 basic_block
1313 single_noncomplex_succ (basic_block bb)
1314 {
1315 edge e0, e1;
1316 if (EDGE_COUNT (bb->succs) != 2)
1317 return bb;
1318
1319 e0 = EDGE_SUCC (bb, 0);
1320 e1 = EDGE_SUCC (bb, 1);
1321 if (e0->flags & EDGE_COMPLEX)
1322 return e1->dest;
1323 if (e1->flags & EDGE_COMPLEX)
1324 return e0->dest;
1325
1326 return bb;
1327 }
1328
1329
1330 /* Walk the function tree removing unnecessary statements.
1331
1332 * Empty statement nodes are removed
1333
1334 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1335
1336 * Unnecessary COND_EXPRs are removed
1337
1338 * Some unnecessary BIND_EXPRs are removed
1339
1340 Clearly more work could be done. The trick is doing the analysis
1341 and removal fast enough to be a net improvement in compile times.
1342
1343 Note that when we remove a control structure such as a COND_EXPR
1344 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1345 to ensure we eliminate all the useless code. */
1346
struct rus_data
{
  /* Address of the most recently seen GOTO_EXPR whose destination is a
     LABEL_DECL; such a goto is deleted if it is immediately followed
     by its own target label.  */
  tree *last_goto;
  /* Set when a simplification was made, requesting another pass.  */
  bool repeat;
  /* Set when a statement in the scanned region could throw.  */
  bool may_throw;
  /* Set when a goto or return was seen in the scanned region.  */
  bool may_branch;
  /* Set when a label was seen in the scanned region.  */
  bool has_label;
};
1355
1356 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1357
static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
  /* Warn at the first located statement found, and report success so
     callers stop after one diagnostic.  */
  if (EXPR_HAS_LOCATION (stmt))
    {
      location_t loc = EXPR_LOCATION (stmt);
      if (LOCATION_LINE (loc) > 0)
        {
          warning (0, "%Hwill never be executed", &loc);
          return true;
        }
    }

  /* Otherwise recurse into statement containers looking for a
     statement that does carry a location.  */
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      {
        tree_stmt_iterator i;
        for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
          if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
            return true;
      }
      break;

    case COND_EXPR:
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
        return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
        return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
        return true;
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
        return true;
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
        return true;
      break;

    case CATCH_EXPR:
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
    case EH_FILTER_EXPR:
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
    case BIND_EXPR:
      /* NOTE(review): this recurses into BIND_EXPR_BLOCK (the BLOCK
         node) rather than BIND_EXPR_BODY (the statements) -- verify
         whether the body was intended here.  */
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));

    default:
      /* Not a live container.  */
      break;
    }

  return false;
}
1413
static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  /* Process each arm separately, tracking per-arm whether it contains
     a label: a labeled arm may be entered by a goto, so it cannot be
     discarded even when the condition is a known constant.  */
  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = fold (COND_EXPR_COND (*stmt_p));

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
          && TREE_CODE (then_stmt) == GOTO_EXPR
          && TREE_CODE (else_stmt) == GOTO_EXPR
          && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
        {
          *stmt_p = then_stmt;
          data->repeat = true;
        }

      /* If the THEN/ELSE clause merely assigns a value to a variable or
         parameter which is already known to contain that value, then
         remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
        {
          /* "if (x) ... else x = 0;" -- the else assignment is a no-op.  */
          if (else_stmt
              && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
              && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
              && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
            COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
        }
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
               && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
                   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
               && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
        {
          tree stmt = (TREE_CODE (cond) == EQ_EXPR
                       ? then_stmt : else_stmt);
          tree *location = (TREE_CODE (cond) == EQ_EXPR
                            ? &COND_EXPR_THEN (*stmt_p)
                            : &COND_EXPR_ELSE (*stmt_p));

          /* "if (x == C) x = C;" -- the taken-arm assignment is a no-op.  */
          if (stmt
              && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
              && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
              && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
            *location = alloc_stmt_list ();
        }
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
1513
1514
/* Simplify a TRY_FINALLY_EXPR at *STMT_P, recursing into both the body
   (operand 0) and the FINALLY handler (operand 1).  */

static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}
1564
1565
/* Simplify a TRY_CATCH_EXPR at *STMT_P.  The body is operand 0 and the
   handler (CATCH_EXPR list, EH_FILTER_EXPR, or a cleanup) is operand 1.  */

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
        {
          stmt = tsi_stmt (i);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (CATCH_TYPES (stmt) == NULL)
            this_may_throw = false;
          data->last_goto = NULL;
          remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
        }
      break;

    case EH_FILTER_EXPR:
      /* A must-not-throw filter, or an empty list of allowed types,
         means no exception escapes this construct.  */
      if (EH_FILTER_MUST_NOT_THROW (stmt))
        this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
        this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
        {
          *stmt_p = TREE_OPERAND (*stmt_p, 0);
          data->repeat = true;
        }
      break;
    }
  data->may_throw |= this_may_throw;
}
1639
1640
1641 static void
1642 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1643 {
1644 tree block;
1645
1646 /* First remove anything underneath the BIND_EXPR. */
1647 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1648
1649 /* If the BIND_EXPR has no variables, then we can pull everything
1650 up one level and remove the BIND_EXPR, unless this is the toplevel
1651 BIND_EXPR for the current function or an inlined function.
1652
1653 When this situation occurs we will want to apply this
1654 optimization again. */
1655 block = BIND_EXPR_BLOCK (*stmt_p);
1656 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1657 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1658 && (! block
1659 || ! BLOCK_ABSTRACT_ORIGIN (block)
1660 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1661 != FUNCTION_DECL)))
1662 {
1663 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1664 data->repeat = true;
1665 }
1666 }
1667
1668
1669 static void
1670 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1671 {
1672 tree dest = GOTO_DESTINATION (*stmt_p);
1673
1674 data->may_branch = true;
1675 data->last_goto = NULL;
1676
1677 /* Record the last goto expr, so that we can delete it if unnecessary. */
1678 if (TREE_CODE (dest) == LABEL_DECL)
1679 data->last_goto = stmt_p;
1680 }
1681
1682
1683 static void
1684 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1685 {
1686 tree label = LABEL_EXPR_LABEL (*stmt_p);
1687
1688 data->has_label = true;
1689
1690 /* We do want to jump across non-local label receiver code. */
1691 if (DECL_NONLOCAL (label))
1692 data->last_goto = NULL;
1693
1694 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1695 {
1696 *data->last_goto = build_empty_stmt ();
1697 data->repeat = true;
1698 }
1699
1700 /* ??? Add something here to delete unused labels. */
1701 }
1702
1703
1704 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1705 decl. This allows us to eliminate redundant or useless
1706 calls to "const" functions.
1707
1708 Gimplifier already does the same operation, but we may notice functions
   being const and pure once their calls have been gimplified, so we need
1710 to update the flag. */
1711
1712 static void
1713 update_call_expr_flags (tree call)
1714 {
1715 tree decl = get_callee_fndecl (call);
1716 if (!decl)
1717 return;
1718 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1719 TREE_SIDE_EFFECTS (call) = 0;
1720 if (TREE_NOTHROW (decl))
1721 TREE_NOTHROW (call) = 1;
1722 }
1723
1724
1725 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1726
1727 void
1728 notice_special_calls (tree t)
1729 {
1730 int flags = call_expr_flags (t);
1731
1732 if (flags & ECF_MAY_BE_ALLOCA)
1733 current_function_calls_alloca = true;
1734 if (flags & ECF_RETURNS_TWICE)
1735 current_function_calls_setjmp = true;
1736 }
1737
1738
1739 /* Clear flags set by notice_special_calls. Used by dead code removal
1740 to update the flags. */
1741
1742 void
1743 clear_special_calls (void)
1744 {
1745 current_function_calls_alloca = false;
1746 current_function_calls_setjmp = false;
1747 }
1748
1749
/* Recursively simplify the statement tree at *TP, dispatching on the
   tree code and accumulating flags into DATA.  */

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
        data->may_throw = true;
      break;

    case MODIFY_EXPR:
      /* Assignments appear only as GIMPLE_MODIFY_STMT here.  */
      gcc_unreachable ();

    case GIMPLE_MODIFY_STMT:
      data->last_goto = NULL;
      fold_stmt (tp);
      /* An embedded call on the RHS still needs its flags noticed.  */
      op = get_call_expr_in (t);
      if (op)
        {
          update_call_expr_flags (op);
          notice_special_calls (op);
        }
      if (tree_could_throw_p (t))
        data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
        tree_stmt_iterator i = tsi_start (t);
        while (!tsi_end_p (i))
          {
            t = tsi_stmt (i);
            /* Drop empty statements outright.  */
            if (IS_EMPTY_STMT (t))
              {
                tsi_delink (&i);
                continue;
              }

            remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

            /* If simplification left a nested statement list, splice
               its elements in place of the list node.  */
            t = tsi_stmt (i);
            if (TREE_CODE (t) == STATEMENT_LIST)
              {
                tsi_link_before (&i, t, TSI_SAME_STMT);
                tsi_delink (&i);
              }
            else
              tsi_next (&i);
          }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    default:
      data->last_goto = NULL;
      break;
    }
}
1847
1848 static unsigned int
1849 remove_useless_stmts (void)
1850 {
1851 struct rus_data data;
1852
1853 clear_special_calls ();
1854
1855 do
1856 {
1857 memset (&data, 0, sizeof (data));
1858 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1859 }
1860 while (data.repeat);
1861 return 0;
1862 }
1863
1864
/* Pass descriptor: removes useless statements (empty statements,
   trivially dead COND_EXPRs, removable TRY and BIND constructs) from
   the function's saved tree.  */
struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",					/* name */
  NULL,						/* gate */
  remove_useless_stmts,				/* execute */
  NULL,						/* sub */
  NULL,						/* next */
  0,						/* static_pass_number */
  0,						/* tv_id */
  PROP_gimple_any,				/* properties_required */
  0,						/* properties_provided */
  0,						/* properties_destroyed */
  0,						/* todo_flags_start */
  TODO_dump_func,				/* todo_flags_finish */
  0						/* letter */
};
1881
1882 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1883
1884 static void
1885 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1886 {
1887 tree phi;
1888
1889 /* Since this block is no longer reachable, we can just delete all
1890 of its PHI nodes. */
1891 phi = phi_nodes (bb);
1892 while (phi)
1893 {
1894 tree next = PHI_CHAIN (phi);
1895 remove_phi_node (phi, NULL_TREE, true);
1896 phi = next;
1897 }
1898
1899 /* Remove edges to BB's successors. */
1900 while (EDGE_COUNT (bb->succs) > 0)
1901 remove_edge (EDGE_SUCC (bb, 0));
1902 }
1903
1904
1905 /* Remove statements of basic block BB. */
1906
static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
#ifdef USE_MAPPED_LOCATION
  source_location loc = UNKNOWN_LOCATION;
#else
  source_locus loc = 0;
#endif

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      if (TREE_CODE (stmt) == LABEL_EXPR
          && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
              || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
        {
          basic_block new_bb;
          block_stmt_iterator new_bsi;

          /* A non-reachable non-local label may still be referenced.
             But it no longer needs to carry the extra semantics of
             non-locality.  */
          if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
            {
              DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
              FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
            }

          /* A referenced label cannot be deleted; move it into the
             preceding block instead.  */
          new_bb = bb->prev_bb;
          new_bsi = bsi_start (new_bb);
          bsi_remove (&i, false);
          bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
        }
      else
        {
          /* Release SSA definitions if we are in SSA.  Note that we
             may be called when not in SSA.  For example,
             final_cleanup calls this function via
             cleanup_tree_cfg.  */
          if (gimple_in_ssa_p (cfun))
            release_defs (stmt);

          bsi_remove (&i, true);
        }

      /* Don't warn for removed gotos.  Gotos are often removed due to
         jump threading, thus resulting in bogus warnings.  Not great,
         since this way we lose warnings for gotos in the original
         program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
        {
#ifdef USE_MAPPED_LOCATION
          if (EXPR_HAS_LOCATION (stmt))
            loc = EXPR_LOCATION (stmt);
#else
          source_locus t;
          t = EXPR_LOCUS (stmt);
          if (t && LOCATION_LINE (*t) > 0)
            loc = t;
#endif
        }
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  The loop above only records a location
     while LOC is still unset, so LOC holds the location of the first
     located non-goto statement in the block.  */
#ifdef USE_MAPPED_LOCATION
  if (loc > BUILTINS_LOCATION)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
#else
  if (loc)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
2007
2008
2009 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2010 predicate VAL, return the edge that will be taken out of the block.
2011 If VAL does not match a unique edge, NULL is returned. */
2012
2013 edge
2014 find_taken_edge (basic_block bb, tree val)
2015 {
2016 tree stmt;
2017
2018 stmt = last_stmt (bb);
2019
2020 gcc_assert (stmt);
2021 gcc_assert (is_ctrl_stmt (stmt));
2022 gcc_assert (val);
2023
2024 if (! is_gimple_min_invariant (val))
2025 return NULL;
2026
2027 if (TREE_CODE (stmt) == COND_EXPR)
2028 return find_taken_edge_cond_expr (bb, val);
2029
2030 if (TREE_CODE (stmt) == SWITCH_EXPR)
2031 return find_taken_edge_switch_expr (bb, val);
2032
2033 if (computed_goto_p (stmt))
2034 return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
2035
2036 gcc_unreachable ();
2037 }
2038
2039 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2040 statement, determine which of the outgoing edges will be taken out of the
2041 block. Return NULL if either edge may be taken. */
2042
2043 static edge
2044 find_taken_edge_computed_goto (basic_block bb, tree val)
2045 {
2046 basic_block dest;
2047 edge e = NULL;
2048
2049 dest = label_to_block (val);
2050 if (dest)
2051 {
2052 e = find_edge (bb, dest);
2053 gcc_assert (e != NULL);
2054 }
2055
2056 return e;
2057 }
2058
2059 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2060 statement, determine which of the two edges will be taken out of the
2061 block. Return NULL if either edge may be taken. */
2062
2063 static edge
2064 find_taken_edge_cond_expr (basic_block bb, tree val)
2065 {
2066 edge true_edge, false_edge;
2067
2068 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2069
2070 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2071 return (integer_zerop (val) ? false_edge : true_edge);
2072 }
2073
2074 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2075 statement, determine which edge will be taken out of the block. Return
2076 NULL if any edge may be taken. */
2077
2078 static edge
2079 find_taken_edge_switch_expr (basic_block bb, tree val)
2080 {
2081 tree switch_expr, taken_case;
2082 basic_block dest_bb;
2083 edge e;
2084
2085 switch_expr = last_stmt (bb);
2086 taken_case = find_case_label_for_value (switch_expr, val);
2087 dest_bb = label_to_block (CASE_LABEL (taken_case));
2088
2089 e = find_edge (bb, dest_bb);
2090 gcc_assert (e);
2091 return e;
2092 }
2093
2094
2095 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2096 We can make optimal use here of the fact that the case labels are
2097 sorted: We can do a binary search for a case matching VAL. */
2098
static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  /* The default label is always the last element of the vector.  */
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  /* Binary search.  LOW starts at (size_t) -1, one "below" index 0;
     the unsigned wraparound makes HIGH - LOW > 1 behave correctly at
     the lower boundary.  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return t;
        }
      else
        {
          /* A case range.  We can only handle integer ranges.  */
          if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
            return t;
        }
    }

  return default_case;
}
2136
2137
2138
2139
2140 /*---------------------------------------------------------------------------
2141 Debugging functions
2142 ---------------------------------------------------------------------------*/
2143
2144 /* Dump tree-specific information of block BB to file OUTF. */
2145
void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
  /* Delegate to the generic dumper, requesting virtual operands and
     memory symbols in the output.  */
  dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
}
2151
2152
2153 /* Dump a basic block on stderr. */
2154
void
debug_tree_bb (basic_block bb)
{
  /* Dump BB to stderr with no indentation and no extra flags.  */
  dump_bb (bb, stderr, 0);
}
2160
2161
2162 /* Dump basic block with index N on stderr. */
2163
2164 basic_block
2165 debug_tree_bb_n (int n)
2166 {
2167 debug_tree_bb (BASIC_BLOCK (n));
2168 return BASIC_BLOCK (n);
2169 }
2170
2171
2172 /* Dump the CFG on stderr.
2173
2174 FLAGS are the same used by the tree dumping functions
2175 (see TDF_* in tree-pass.h). */
2176
void
debug_tree_cfg (int flags)
{
  /* FLAGS are the TDF_* dump flags; output goes to stderr.  */
  dump_tree_cfg (stderr, flags);
}
2182
2183
2184 /* Dump the program showing basic block boundaries on the given FILE.
2185
2186 FLAGS are the same used by the tree dumping functions (see TDF_* in
2187 tree.h). */
2188
void
dump_tree_cfg (FILE *file, int flags)
{
  /* With TDF_DETAILS, prepend a summary header and a brief CFG dump.  */
  if (flags & TDF_DETAILS)
    {
      const char *funcname
        = lang_hooks.decl_printable_name (current_function_decl, 2);

      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
               n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body with basic block boundaries shown.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2211
2212
2213 /* Dump CFG statistics on FILE. */
2214
void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of merged labels across all calls.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by basic block structures.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
           SCALE (size), LABEL (size));

  /* Memory consumed by edges, counted over every block's successors.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
           LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
           cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2263
2264
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable and callable from a debugger.  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2273
2274
/* Dump the flowgraph to a .vcg FILE.

   Emits the current function's CFG in the VCG graph-description
   format: one node per basic block (plus synthetic ENTRY/EXIT nodes),
   labelled with the block index and the tree codes and line numbers of
   its first and last statements, and one edge record per CFG edge.
   Fake edges are drawn dotted with low priority.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      /* Describe the block by its first and last statements; an empty
	 block gets the placeholder name and line 0.  */
      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2358
2359
2360
2361 /*---------------------------------------------------------------------------
2362 Miscellaneous helpers
2363 ---------------------------------------------------------------------------*/
2364
2365 /* Return true if T represents a stmt that always transfers control. */
2366
2367 bool
2368 is_ctrl_stmt (tree t)
2369 {
2370 return (TREE_CODE (t) == COND_EXPR
2371 || TREE_CODE (t) == SWITCH_EXPR
2372 || TREE_CODE (t) == GOTO_EXPR
2373 || TREE_CODE (t) == RETURN_EXPR
2374 || TREE_CODE (t) == RESX_EXPR);
2375 }
2376
2377
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).

   Three cases qualify: a call with side effects in a function that has
   nonlocal labels (the callee could jump back via a nonlocal goto), a
   noreturn call, an OpenMP directive, or a statement that can throw
   to a handler within this function.  */

bool
is_ctrl_altering_stmt (tree t)
{
  tree call;

  gcc_assert (t);
  call = get_call_expr_in (t);
  if (call)
    {
      /* A non-pure/const CALL_EXPR alters flow control if the current
	 function has nonlocal labels.  */
      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
	return true;

      /* A CALL_EXPR also alters control flow if it does not return.  */
      if (call_expr_flags (call) & ECF_NORETURN)
	return true;
    }

  /* OpenMP directives alter control flow.  */
  if (OMP_DIRECTIVE_P (t))
    return true;

  /* If a statement can throw, it alters control flow.  */
  return tree_can_throw_internal (t);
}
2407
2408
2409 /* Return true if T is a computed goto. */
2410
2411 bool
2412 computed_goto_p (tree t)
2413 {
2414 return (TREE_CODE (t) == GOTO_EXPR
2415 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2416 }
2417
2418
2419 /* Return true if T is a simple local goto. */
2420
2421 bool
2422 simple_goto_p (tree t)
2423 {
2424 return (TREE_CODE (t) == GOTO_EXPR
2425 && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
2426 }
2427
2428
/* Return true if T can make an abnormal transfer of control flow.
   Transfers of control flow associated with EH are excluded.

   True for computed gotos, and for calls with side effects when the
   current function contains nonlocal labels (the callee may return via
   a nonlocal goto).  The statement is unwrapped through
   GIMPLE_MODIFY_STMT and WITH_SIZE_EXPR to reach an embedded call.  */

bool
tree_can_make_abnormal_goto (tree t)
{
  if (computed_goto_p (t))
    return true;
  /* Look through an assignment's RHS and a WITH_SIZE_EXPR wrapper to
     find a CALL_EXPR; the order matters since the call may be nested.  */
  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
    t = GIMPLE_STMT_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  if (TREE_CODE (t) == CALL_EXPR)
    return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
  return false;
}
2445
2446
/* Return true if T should start a new basic block.  PREV_T is the
   statement preceding T.  It is used when T is a label or a case label.
   Labels should only start a new basic block if their previous statement
   wasn't a label.  Otherwise, sequence of labels would generate
   unnecessary basic blocks that only contain a single label.

   Side effect: bumps cfg_stats.num_merged_labels each time a label is
   coalesced into the block of the preceding label.  */

static inline bool
stmt_starts_bb_p (tree t, tree prev_t)
{
  if (t == NULL_TREE)
    return false;

  /* LABEL_EXPRs start a new basic block only if the preceding
     statement wasn't a label of the same type.  This prevents the
     creation of consecutive blocks that have nothing but a single
     label.  */
  if (TREE_CODE (t) == LABEL_EXPR)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
	  || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
	return true;

      if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
	{
	  /* A label cannot share a block with a preceding nonlocal
	     label, since the latter is an abnormal-edge target.  */
	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2484
2485
2486 /* Return true if T should end a basic block. */
2487
2488 bool
2489 stmt_ends_bb_p (tree t)
2490 {
2491 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2492 }
2493
2494
/* Add gotos that used to be represented implicitly in the CFG.

   While the CFG exists, fallthru semantics are carried on edges; when
   leaving CFG form, every non-fallthru transfer must again be an
   explicit GOTO_EXPR.  This pass also strips now-redundant gotos from
   COND_EXPR arms and removes a trailing valueless RETURN_EXPR that can
   fall through to the exit block.  */

void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
	     from cfg_remove_useless_stmts here since it violates the
	     invariants for tree--cfg correspondence and thus fits better
	     here where we do it anyway.  */
	  e = find_edge (bb, bb->next_bb);
	  if (e)
	    {
	      /* Whichever arm targets the immediately following block
		 becomes an empty statement; the edge is a fallthru.  */
	      if (e->flags & EDGE_TRUE_VALUE)
		COND_EXPR_THEN (stmt) = build_empty_stmt ();
	      else if (e->flags & EDGE_FALSE_VALUE)
		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
	      else
		gcc_unreachable ();
	      e->flags |= EDGE_FALLTHRU;
	    }

	  continue;
	}

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* Remove the RETURN_EXPR if we may fall though to the exit
	     instead.  */
	  gcc_assert (single_succ_p (bb));
	  gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

	  /* Only a return with no value, whose block immediately
	     precedes EXIT, can be replaced by plain fallthrough.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      && !TREE_OPERAND (stmt, 0))
	    {
	      bsi_remove (&last, true);
	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	    }
	  continue;
	}

      /* There can be no fallthru edge if the last statement is a control
	 one.  */
      if (stmt && is_ctrl_stmt (stmt))
	continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* No goto needed when the fallthru target is the next block.  */
      if (!e || e->dest == bb->next_bb)
	continue;

      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
2574
/* Remove block annotations and other datastructures.

   Frees the per-statement annotation of every statement in the
   function (returning the memory to the GC allocator) and drops the
   label-to-block map so no stale CFG data survives.  */

void
delete_tree_cfg_annotations (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  /* Remove annotations from every tree in the function.  */
  FOR_EACH_BB (bb)
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	tree stmt = bsi_stmt (bsi);
	/* Explicitly free the annotation rather than waiting for GC,
	   then clear the pointer so nothing dereferences it.  */
	ggc_free (stmt->base.ann);
	stmt->base.ann = NULL;
      }
  label_to_block_map = NULL;
}
2593
2594
2595 /* Return the first statement in basic block BB. */
2596
2597 tree
2598 first_stmt (basic_block bb)
2599 {
2600 block_stmt_iterator i = bsi_start (bb);
2601 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2602 }
2603
2604
2605 /* Return the last statement in basic block BB. */
2606
2607 tree
2608 last_stmt (basic_block bb)
2609 {
2610 block_stmt_iterator b = bsi_last (bb);
2611 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2612 }
2613
2614
2615 /* Return the last statement of an otherwise empty block. Return NULL
2616 if the block is totally empty, or if it contains more than one
2617 statement. */
2618
2619 tree
2620 last_and_only_stmt (basic_block bb)
2621 {
2622 block_stmt_iterator i = bsi_last (bb);
2623 tree last, prev;
2624
2625 if (bsi_end_p (i))
2626 return NULL_TREE;
2627
2628 last = bsi_stmt (i);
2629 bsi_prev (&i);
2630 if (bsi_end_p (i))
2631 return last;
2632
2633 /* Empty statements should no longer appear in the instruction stream.
2634 Everything that might have appeared before should be deleted by
2635 remove_useless_stmts, and the optimizers should just bsi_remove
2636 instead of smashing with build_empty_stmt.
2637
2638 Thus the only thing that should appear here in a block containing
2639 one executable statement is a label. */
2640 prev = bsi_stmt (i);
2641 if (TREE_CODE (prev) == LABEL_EXPR)
2642 return last;
2643 else
2644 return NULL_TREE;
2645 }
2646
2647
/* Mark BB as the basic block holding statement T.

   Handles PHI nodes, whole STATEMENT_LISTs (recursively), and ordinary
   statements.  For LABEL_EXPRs this also maintains the global
   label-to-block map, assigning a fresh LABEL_DECL_UID when the label
   is first seen and growing the map as needed.  */

void
set_bb_for_stmt (tree t, basic_block bb)
{
  if (TREE_CODE (t) == PHI_NODE)
    PHI_BB (t) = bb;
  else if (TREE_CODE (t) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
	set_bb_for_stmt (tsi_stmt (i), bb);
    }
  else
    {
      stmt_ann_t ann = get_stmt_ann (t);
      ann->bb = bb;

      /* If the statement is a label, add the label to block-to-labels map
	 so that we can speed up edge creation for GOTO_EXPRs.  */
      if (TREE_CODE (t) == LABEL_EXPR)
	{
	  int uid;

	  t = LABEL_EXPR_LABEL (t);
	  uid = LABEL_DECL_UID (t);
	  if (uid == -1)
	    {
	      /* First time we see this label: assign it a UID and make
		 sure the map is large enough to index it.  */
	      unsigned old_len = VEC_length (basic_block, label_to_block_map);
	      LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
	      if (old_len <= (unsigned) uid)
		{
		  /* Grow by 50% beyond UID to amortize reallocations.  */
		  unsigned new_len = 3 * uid / 2;

		  VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
					 new_len);
		}
	    }
	  else
	    /* We're moving an existing label.  Make sure that we've
	       removed it from the old block.  */
	    gcc_assert (!bb
			|| !VEC_index (basic_block, label_to_block_map, uid));
	  VEC_replace (basic_block, label_to_block_map, uid, bb);
	}
    }
}
2695
/* Faster version of set_bb_for_stmt that assume that statement is being moved
   from one basic block to another.
   For BB splitting we can run into quadratic case, so performance is quite
   important and knowing that the tables are big enough, change_bb_for_stmt
   can inline as leaf function.

   Precondition: T already has an annotation and, if it is a label, an
   assigned LABEL_DECL_UID with a map slot — no growth is performed.  */

static inline void
change_bb_for_stmt (tree t, basic_block bb)
{
  get_stmt_ann (t)->bb = bb;
  if (TREE_CODE (t) == LABEL_EXPR)
    VEC_replace (basic_block, label_to_block_map,
		 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
}
2709
2710 /* Finds iterator for STMT. */
2711
2712 extern block_stmt_iterator
2713 bsi_for_stmt (tree stmt)
2714 {
2715 block_stmt_iterator bsi;
2716
2717 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2718 if (bsi_stmt (bsi) == stmt)
2719 return bsi;
2720
2721 gcc_unreachable ();
2722 }
2723
2724 /* Mark statement T as modified, and update it. */
2725 static inline void
2726 update_modified_stmts (tree t)
2727 {
2728 if (!ssa_operands_active ())
2729 return;
2730 if (TREE_CODE (t) == STATEMENT_LIST)
2731 {
2732 tree_stmt_iterator i;
2733 tree stmt;
2734 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2735 {
2736 stmt = tsi_stmt (i);
2737 update_stmt_if_modified (stmt);
2738 }
2739 }
2740 else
2741 update_stmt_if_modified (t);
2742 }
2743
/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Record the owning block first (this also registers any labels in
     the label map), refresh SSA operands, then link into the list.  */
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_before (&i->tsi, t, m);
}
2755
2756
/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Record the owning block first (this also registers any labels in
     the label map), refresh SSA operands, then link into the list.  */
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_after (&i->tsi, t, m);
}
2768
2769
/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.

   When REMOVE_EH_INFO is true we remove the statement pointed to by
   iterator I from the EH tables.  Otherwise we do not modify the EH
   tables.

   Generally, REMOVE_EH_INFO should be true when the statement is going to
   be removed from the IL and not reinserted elsewhere.  */

void
bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
{
  tree t = bsi_stmt (*i);
  /* Detach T from its block and from immediate-use chains before
     unlinking it from the statement list.  */
  set_bb_for_stmt (t, NULL);
  delink_stmt_imm_use (t);
  tsi_delink (&i->tsi);
  mark_stmt_modified (t);
  if (remove_eh_info)
    {
      remove_stmt_from_eh_region (t);
      gimple_remove_stmt_histograms (cfun, t);
    }
}
2794
2795
2796 /* Move the statement at FROM so it comes right after the statement at TO. */
2797
2798 void
2799 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2800 {
2801 tree stmt = bsi_stmt (*from);
2802 bsi_remove (from, false);
2803 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2804 }
2805
2806
2807 /* Move the statement at FROM so it comes right before the statement at TO. */
2808
2809 void
2810 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2811 {
2812 tree stmt = bsi_stmt (*from);
2813 bsi_remove (from, false);
2814 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2815 }
2816
2817
2818 /* Move the statement at FROM to the end of basic block BB. */
2819
2820 void
2821 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2822 {
2823 block_stmt_iterator last = bsi_last (bb);
2824
2825 /* Have to check bsi_end_p because it could be an empty block. */
2826 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2827 bsi_move_before (from, &last);
2828 else
2829 bsi_move_after (from, &last);
2830 }
2831
2832
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If UPDATE_EH_INFO is true, the exception handling
   information of the original statement is moved to the new statement.

   The source location and value-profile histograms of the original
   statement are transferred to STMT; the original is unlinked from
   immediate-use chains.  No-op when STMT is already the statement at
   BSI.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  if (stmt == orig_stmt)
    return;
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (update_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
	{
	  remove_stmt_from_eh_region (orig_stmt);
	  add_stmt_to_eh_region (stmt, eh_region);
	}
    }

  /* Move profiling histograms over to the replacement before the
     original's records are deleted.  */
  gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
  gimple_remove_stmt_histograms (cfun, orig_stmt);
  delink_stmt_imm_use (orig_stmt);
  *bsi_stmt_ptr (*bsi) = stmt;
  mark_stmt_modified (stmt);
  update_modified_stmts (stmt);
}
2867
2868
/* Insert the statement pointed-to by BSI into edge E.  Every attempt
   is made to place the statement in an existing basic block, but
   sometimes that isn't possible.  When it isn't possible, the edge is
   split and the statement is added to the new block.

   In all cases, the returned *BSI points to the correct location.  The
   return value is true if insertion should be done after the location,
   or false if it should be done before the location.  If new basic block
   has to be created, it is stored in *NEW_BB.  */

static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
			   basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (single_pred_p (dest)
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
	return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
	{
	  bsi_next (bsi);
	  if (bsi_end_p (*bsi))
	    break;
	  tmp = bsi_stmt (*bsi);
	}

      /* If only labels remain, insert after the last one; otherwise
	 insert before the first non-label statement.  */
      if (bsi_end_p (*bsi))
	{
	  *bsi = bsi_last (dest);
	  return true;
	}
      else
	return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && single_succ_p (src)
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
	return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
	return true;

      /* Insert code just before returning the value.  We may need to decompose
	 the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
	{
	  tree op = TREE_OPERAND (tmp, 0);
	  if (op && !is_gimple_val (op))
	    {
	      gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
	      TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
	    }
	  /* Step back so the insertion point is before the return.  */
	  bsi_prev (bsi);
	  return true;
	}
    }

  /* Otherwise, create a new basic block, and split this edge.  The
     split edge's destination then satisfies the single-predecessor,
     no-PHI case, so the retry terminates.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = single_pred_edge (dest);
  goto restart;
}
2962
2963
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.

   The entry block's single successor edge is handled explicitly since
   FOR_EACH_BB does not visit the entry block.  */

void
bsi_commit_edge_inserts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_one_edge_insert (e, NULL);
}
2980
2981
/* Commit insertions pending at edge E.  If a new block is created, set NEW_BB
   to this block, otherwise set it to NULL.

   The pending statement list is cleared before insertion so the edge
   holds no stale reference.  */

void
bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
{
  if (new_bb)
    *new_bb = NULL;
  if (PENDING_STMT (e))
    {
      block_stmt_iterator bsi;
      tree stmt = PENDING_STMT (e);

      PENDING_STMT (e) = NULL_TREE;

      /* tree_find_edge_insert_loc tells us whether to insert after or
	 before the location it computed.  */
      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
	bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
      else
	bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
    }
}
3003
3004
/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.  Multiple
   queued statements accumulate on the edge's pending list in order.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
3013
/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
   block has to be created, it is returned; otherwise NULL.  E must
   have no statements already pending.  */

basic_block
bsi_insert_on_edge_immediate (edge e, tree stmt)
{
  block_stmt_iterator bsi;
  basic_block new_bb = NULL;

  /* Immediate insertion must not race with queued insertions on the
     same edge: they would be committed out of order later.  */
  gcc_assert (!PENDING_STMT (e));

  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
  else
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

  return new_bb;
}
3032
3033 /*---------------------------------------------------------------------------
3034 Tree specific functions for CFG manipulation
3035 ---------------------------------------------------------------------------*/
3036
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.

   The pending list on OLD_EDGE is a TREE_LIST of (result, argument)
   pairs that is walked in parallel with the PHI chain of NEW_EDGE's
   destination; each argument is re-added for NEW_EDGE.  The pending
   list is cleared afterwards.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  tree var, phi;

  if (!PENDING_STMT (old_edge))
    return;

  /* Walk the queued (PURPOSE = PHI result, VALUE = argument) pairs in
     lock-step with the destination's PHI chain.  */
  for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
       var && phi;
       var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
    {
      tree result = TREE_PURPOSE (var);
      tree arg = TREE_VALUE (var);

      /* The queue must stay in sync with the PHI chain.  */
      gcc_assert (result == PHI_RESULT (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  PENDING_STMT (old_edge) = NULL;
}
3061
3062 /* Returns the basic block after which the new basic block created
3063 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3064 near its "logical" location. This is of most help to humans looking
3065 at debugging dumps. */
3066
3067 static basic_block
3068 split_edge_bb_loc (edge edge_in)
3069 {
3070 basic_block dest = edge_in->dest;
3071
3072 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3073 return edge_in->src;
3074 else
3075 return dest->prev_bb;
3076 }
3077
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.

   The new block inherits EDGE_IN's count and frequency; the new
   fallthru edge to the old destination gets probability 1 (all flow
   passes through).  Queued PHI arguments from the redirected edge are
   reinstalled on the new edge.  */

static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect the original edge into the new block; this queues PHI
     arguments on E which we transfer to the new fallthru edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
3107
3108
3109 /* Return true when BB has label LABEL in it. */
3110
3111 static bool
3112 has_label_p (basic_block bb, tree label)
3113 {
3114 block_stmt_iterator bsi;
3115
3116 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3117 {
3118 tree stmt = bsi_stmt (bsi);
3119
3120 if (TREE_CODE (stmt) != LABEL_EXPR)
3121 return false;
3122 if (LABEL_EXPR_LABEL (stmt) == label)
3123 return true;
3124 }
3125 return false;
3126 }
3127
3128
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.

   Returns the offending subtree on failure (after calling error ())
   and NULL when the node is valid; walk_tree stops at the first
   non-NULL return.  *WALK_SUBTREES is cleared for subtrees already
   validated here.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;
  bool in_phi = (data != NULL);

  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      /* MODIFY_EXPR is not expected at this stage; assignments appear
	 as GIMPLE_MODIFY_STMT.  */
      gcc_unreachable ();

    case GIMPLE_MODIFY_STMT:
      x = GIMPLE_STMT_OPERAND (t, 0);
      if (TREE_CODE (x) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (x, 0)))
	{
	  error ("GIMPLE register modified with BIT_FIELD_REF");
	  return t;
	}
      break;

    case ADDR_EXPR:
      {
	bool old_invariant;
	bool old_constant;
	bool old_side_effects;
	bool new_invariant;
	bool new_constant;
	bool new_side_effects;

	/* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
	   dead PHIs that take the address of something.  But if the PHI
	   result is dead, the fact that it takes the address of anything
	   is irrelevant.  Because we can not tell from here if a PHI result
	   is dead, we just skip this check for PHIs altogether.  This means
	   we may be missing "valid" checks, but what can you do?
	   This was PR19217.  */
	if (in_phi)
	  break;

	/* Recompute the flags and verify they did not change — i.e.
	   whoever modified this ADDR_EXPR kept them up to date.  */
	old_invariant = TREE_INVARIANT (t);
	old_constant = TREE_CONSTANT (t);
	old_side_effects = TREE_SIDE_EFFECTS (t);

	recompute_tree_invariant_for_addr_expr (t);
	new_invariant = TREE_INVARIANT (t);
	new_side_effects = TREE_SIDE_EFFECTS (t);
	new_constant = TREE_CONSTANT (t);

	if (old_invariant != new_invariant)
	  {
	    error ("invariant not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	if (old_constant != new_constant)
	  {
	    error ("constant not recomputed when ADDR_EXPR changed");
	    return t;
	  }
	if (old_side_effects != new_side_effects)
	  {
	    error ("side effects not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
	  return NULL;
	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }
	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
	{
	  error ("non-boolean used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
	      CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3361
3362
/* Verify STMT, return true if STMT is not in GIMPLE form.
   TODO: Implement type checking.

   LAST_IN_BLOCK indicates that STMT is the final statement of its
   block; only then may the statement throw internally.  On failure
   the offending statement (or subtree) is dumped for diagnosis.  */

static bool
verify_stmt (tree stmt, bool last_in_block)
{
  tree addr;

  if (OMP_DIRECTIVE_P (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;
    }

  if (!is_gimple_stmt (stmt))
    {
      error ("is not a valid GIMPLE statement");
      goto fail;
    }

  /* verify_expr reports its own error and returns the bad subtree.  */
  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
  if (addr)
    {
      debug_generic_stmt (addr);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  if (lookup_stmt_eh_region (stmt) >= 0)
    {
      if (!tree_could_throw_p (stmt))
	{
	  error ("statement marked for throw, but doesn%'t");
	  goto fail;
	}
      if (!last_in_block && tree_can_throw_internal (stmt))
	{
	  error ("statement marked for throw in middle of block");
	  goto fail;
	}
    }

  return false;

 fail:
  debug_generic_stmt (stmt);
  return true;
}
3420
3421
3422 /* Return true when the T can be shared. */
3423
3424 static bool
3425 tree_node_can_be_shared (tree t)
3426 {
3427 if (IS_TYPE_OR_DECL_P (t)
3428 || is_gimple_min_invariant (t)
3429 || TREE_CODE (t) == SSA_NAME
3430 || t == error_mark_node
3431 || TREE_CODE (t) == IDENTIFIER_NODE)
3432 return true;
3433
3434 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3435 return true;
3436
3437 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3438 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3439 || TREE_CODE (t) == COMPONENT_REF
3440 || TREE_CODE (t) == REALPART_EXPR
3441 || TREE_CODE (t) == IMAGPART_EXPR)
3442 t = TREE_OPERAND (t, 0);
3443
3444 if (DECL_P (t))
3445 return true;
3446
3447 return false;
3448 }
3449
3450
3451 /* Called via walk_trees. Verify tree sharing. */
3452
3453 static tree
3454 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3455 {
3456 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3457
3458 if (tree_node_can_be_shared (*tp))
3459 {
3460 *walk_subtrees = false;
3461 return NULL;
3462 }
3463
3464 if (pointer_set_insert (visited, *tp))
3465 return *tp;
3466
3467 return NULL;
3468 }
3469
3470
3471 /* Helper function for verify_gimple_tuples. */
3472
3473 static tree
3474 verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3475 void *data ATTRIBUTE_UNUSED)
3476 {
3477 switch (TREE_CODE (*tp))
3478 {
3479 case MODIFY_EXPR:
3480 error ("unexpected non-tuple");
3481 debug_tree (*tp);
3482 gcc_unreachable ();
3483 return NULL_TREE;
3484
3485 default:
3486 return NULL_TREE;
3487 }
3488 }
3489
3490 /* Verify that there are no trees that should have been converted to
3491 gimple tuples. Return true if T contains a node that should have
3492 been converted to a gimple tuple, but hasn't. */
3493
3494 static bool
3495 verify_gimple_tuples (tree t)
3496 {
3497 return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
3498 }
3499
3500 static bool eh_error_found;
3501 static int
3502 verify_eh_throw_stmt_node (void **slot, void *data)
3503 {
3504 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
3505 struct pointer_set_t *visited = (struct pointer_set_t *) data;
3506
3507 if (!pointer_set_contains (visited, node->stmt))
3508 {
3509 error ("Dead STMT in EH table");
3510 debug_generic_stmt (node->stmt);
3511 eh_error_found = true;
3512 }
3513 return 0;
3514 }
3515
/* Verify the GIMPLE statement chain.  Checks every PHI node and
   statement in every basic block for GIMPLE validity, correct
   bb_for_stmt back-pointers and tree-node sharing, then verifies that
   the EH throw table contains no dead statements.  Aborts with an
   internal error on any failure.  */

void
verify_stmts (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool err = false;
  struct pointer_set_t *visited, *visited_stmts;
  tree addr;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* VISITED tracks tree nodes for the sharing check; VISITED_STMTS
     records every live statement for the EH-table check below.  */
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  FOR_EACH_BB (bb)
    {
      tree phi;
      int i;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int phi_num_args = PHI_NUM_ARGS (phi);

	  pointer_set_insert (visited_stmts, phi);
	  if (bb_for_stmt (phi) != bb)
	    {
	      error ("bb_for_stmt (phi) is set to a wrong basic block");
	      err |= true;
	    }

	  for (i = 0; i < phi_num_args; i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      tree addr;

	      /* Addressable variables do have SSA_NAMEs but they
		 are not considered gimple values.  */
	      if (TREE_CODE (t) != SSA_NAME
		  && TREE_CODE (t) != FUNCTION_DECL
		  && !is_gimple_val (t))
		{
		  error ("PHI def is not a GIMPLE value");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (t);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
	      if (addr)
		{
		  debug_generic_stmt (addr);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_node_sharing, visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (addr);
		  err |= true;
		}
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);

	  pointer_set_insert (visited_stmts, stmt);
	  err |= verify_gimple_tuples (stmt);

	  if (bb_for_stmt (stmt) != bb)
	    {
	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
	      err |= true;
	    }

	  /* Advance the iterator first so that verify_stmt can be told
	     whether STMT is the last statement in the block.  */
	  bsi_next (&bsi);
	  err |= verify_stmt (stmt, bsi_end_p (bsi));
	  addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_stmt (stmt);
	      debug_generic_stmt (addr);
	      err |= true;
	    }
	}
    }
  /* Any EH table entry whose statement was not visited above refers
     to a dead statement; verify_eh_throw_stmt_node flags those.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err | eh_error_found)
    internal_error ("verify_stmts failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
3621
3622
/* Verifies that the flow information is OK.  Checks that the
   ENTRY/EXIT blocks carry no statements, that labels appear only at
   block starts, that no control flow occurs mid-block, and that each
   block's final statement agrees with its outgoing edge flags.
   Returns nonzero on error.  */

static int
tree_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  block_stmt_iterator bsi;
  tree stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR->stmt_list)
    {
      error ("ENTRY_BLOCK has a statement list associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->stmt_list)
    {
      error ("EXIT_BLOCK has a statement list associated with it");
      err = 1;
    }

  /* Only RETURN_EXPR (and EH/abnormal) edges may reach the exit.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL_TREE;

      /* Skip labels on the start of basic block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree prev_stmt = stmt;

	  stmt = bsi_stmt (bsi);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  /* Nonlocal labels must come before all other labels.  */
	  if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (LABEL_EXPR_LABEL (stmt))
	      != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (TREE_CODE (stmt) == LABEL_EXPR)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* Now check the last statement against the outgoing edges.  */
      bsi = bsi_last (bb);
      if (bsi_end_p (bsi))
	continue;

      stmt = bsi_stmt (bsi);

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (TREE_CODE (stmt) != COND_EXPR)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-COND_EXPR in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    edge true_edge;
	    edge false_edge;
	    /* Both arms must be bare gotos at this point.  */
	    if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
		|| TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
	      {
		error ("structured COND_EXPR at the end of bb %d", bb->index);
		err = 1;
	      }

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge || !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }

	    if (!has_label_p (true_edge->dest,
			      GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
	      {
		error ("%<then%> label does not match edge at end of bb %d",
		       bb->index);
		err = 1;
	      }

	    if (!has_label_p (false_edge->dest,
			      GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
	      {
		error ("%<else%> label does not match edge at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GOTO_EXPR:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case RETURN_EXPR:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case SWITCH_EXPR:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;
	    tree vec;

	    vec = SWITCH_LABELS (stmt);
	    n = TREE_VEC_LENGTH (vec);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
		basic_block label_bb = label_to_block (lab);

		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  The loop stops
	       before the last element, which must be the default.  */
	    prev = TREE_VEC_ELT (vec, 0);
	    for (i = 1; i < n - 1; ++i)
	      {
		tree c = TREE_VEC_ELT (vec, i);
		if (! CASE_LOW (c))
		  {
		    error ("found default case not at end of case vector");
		    err = 1;
		    continue;
		  }
		if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
	      {
		error ("no default case found at end of case vector");
		err = 1;
	      }

	    /* Every successor edge must lead to a marked case-label
	       block; mark reached blocks with 2.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }
		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i",
			   bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clean the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  /* NOTE(review): no break here -- falls through to the empty
	     default, which is harmless but an explicit break would be
	     clearer.  */

	default: ;
	}
    }

  if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
3932
3933
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  FALLTHRU->src is the new forwarder (DUMMY),
   FALLTHRU->dest the original block (BB) that received the PHIs.  */

static void
tree_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree phi, new_phi, var;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* If the forwarder is BB's only predecessor, no PHI adjustment is
     needed.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
    {
      var = PHI_RESULT (phi);
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      /* The PHI in DUMMY now defines a fresh SSA name, which feeds
	 the new PHI in BB along FALLTHRU.  */
      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
    }

  /* Ensure that the PHI node chain is in the same order.  */
  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
3974
3975
3976 /* Return a non-special label in the head of basic block BLOCK.
3977 Create one if it doesn't exist. */
3978
3979 tree
3980 tree_block_label (basic_block bb)
3981 {
3982 block_stmt_iterator i, s = bsi_start (bb);
3983 bool first = true;
3984 tree label, stmt;
3985
3986 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3987 {
3988 stmt = bsi_stmt (i);
3989 if (TREE_CODE (stmt) != LABEL_EXPR)
3990 break;
3991 label = LABEL_EXPR_LABEL (stmt);
3992 if (!DECL_NONLOCAL (label))
3993 {
3994 if (!first)
3995 bsi_move_before (&i, &s);
3996 return label;
3997 }
3998 }
3999
4000 label = create_artificial_label ();
4001 stmt = build1 (LABEL_EXPR, void_type_node, label);
4002 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4003 return label;
4004 }
4005
4006
4007 /* Attempt to perform edge redirection by replacing a possibly complex
4008 jump instruction by a goto or by removing the jump completely.
4009 This can apply only if all edges now point to the same block. The
4010 parameters and return values are equivalent to
4011 redirect_edge_and_branch. */
4012
4013 static edge
4014 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4015 {
4016 basic_block src = e->src;
4017 block_stmt_iterator b;
4018 tree stmt;
4019
4020 /* We can replace or remove a complex jump only when we have exactly
4021 two edges. */
4022 if (EDGE_COUNT (src->succs) != 2
4023 /* Verify that all targets will be TARGET. Specifically, the
4024 edge that is not E must also go to TARGET. */
4025 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4026 return NULL;
4027
4028 b = bsi_last (src);
4029 if (bsi_end_p (b))
4030 return NULL;
4031 stmt = bsi_stmt (b);
4032
4033 if (TREE_CODE (stmt) == COND_EXPR
4034 || TREE_CODE (stmt) == SWITCH_EXPR)
4035 {
4036 bsi_remove (&b, true);
4037 e = ssa_redirect_edge (e, target);
4038 e->flags = EDGE_FALLTHRU;
4039 return e;
4040 }
4041
4042 return NULL;
4043 }
4044
4045
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Rewrites the control
   statement at the end of E->src (if any) to target DEST's label.  */

static edge
tree_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  block_stmt_iterator bsi;
  edge ret;
  tree label, stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* First try to dispose of the controlling statement entirely.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
    return ret;

  if (e->dest == dest)
    return NULL;

  label = tree_block_label (dest);

  bsi = bsi_last (bb);
  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case COND_EXPR:
      /* Rewrite the goto in whichever arm of the COND_EXPR that E
	 represents.  */
      stmt = (e->flags & EDGE_TRUE_VALUE
	      ? COND_EXPR_THEN (stmt)
	      : COND_EXPR_ELSE (stmt));
      GOTO_DESTINATION (stmt) = label;
      break;

    case GOTO_EXPR:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      {
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Retarget every case on E's chain at LABEL, remembering
	       the chain's first and last elements.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = TREE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		TREE_CHAIN (last) = TREE_CHAIN (cases2);
		TREE_CHAIN (cases2) = first;
	      }
	  }
	else
	  {
	    /* No cached case list: walk the whole case vector and
	       retarget each case that pointed at E's old destination.  */
	    tree vec = SWITCH_LABELS (stmt);
	    size_t i, n = TREE_VEC_LENGTH (vec);

	    for (i = 0; i < n; i++)
	      {
		tree elt = TREE_VEC_ELT (vec, i);

		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }

	break;
      }

    case RETURN_EXPR:
      /* The return becomes a fallthru into DEST.  */
      bsi_remove (&bsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
4151
4152 /* Returns true if it is possible to remove edge E by redirecting
4153 it to the destination of the other edge from E->src. */
4154
4155 static bool
4156 tree_can_remove_branch_p (edge e)
4157 {
4158 if (e->flags & EDGE_ABNORMAL)
4159 return false;
4160
4161 return true;
4162 }
4163
4164 /* Simple wrapper, as we can always redirect fallthru edges. */
4165
4166 static basic_block
4167 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4168 {
4169 e = tree_redirect_edge_and_branch (e, dest);
4170 gcc_assert (e);
4171
4172 return NULL;
4173 }
4174
4175
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the new basic block holding the statements after the split
   point; BB keeps the statements up to and including STMT.  */

static basic_block
tree_split_block (basic_block bb, void *stmt)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator tsi_tgt;
  tree act;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label degenerates to splitting after the labels.  */
  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
    stmt = NULL;

  /* Move everything from BSI to the new basic block.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      act = bsi_stmt (bsi);
      if (TREE_CODE (act) == LABEL_EXPR)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  bsi_next (&bsi);
	  break;
	}
    }

  /* Nothing follows the split point; NEW_BB stays empty.  */
  if (bsi_end_p (bsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
  for (tsi_tgt = tsi_start (new_bb->stmt_list);
       !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
    change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);

  return new_bb;
}
4231
4232
4233 /* Moves basic block BB after block AFTER. */
4234
4235 static bool
4236 tree_move_block_after (basic_block bb, basic_block after)
4237 {
4238 if (bb->prev_bb == after)
4239 return true;
4240
4241 unlink_block (bb);
4242 link_block (bb, after);
4243
4244 return true;
4245 }
4246
4247
4248 /* Return true if basic_block can be duplicated. */
4249
4250 static bool
4251 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4252 {
4253 return true;
4254 }
4255
4256
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is placed just before the exit block;
   labels are not copied, and every definition in the copy gets a new
   SSA name with a replacement mapping recorded.  */

static basic_block
tree_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  block_stmt_iterator bsi, bsi_tgt;
  tree phi;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
    }

  /* Keep the chain of PHI nodes in the same order so that they can be
     updated by ssa_redirect_edge.  */
  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));

  bsi_tgt = bsi_start (new_bb);
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree stmt, copy;
      int region;

      stmt = bsi_stmt (bsi);
      /* Do not copy labels.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = unshare_expr (stmt);
      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
      copy_virtual_operands (copy, stmt);
      /* Keep the copy in the same EH region as the original.  */
      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	add_stmt_to_eh_region (copy, region);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
4311
4312
/* Basic block BB_COPY was created by code duplication.  Add phi node
   arguments for edges going out of BB_COPY.  The blocks that were
   duplicated have BB_DUPLICATED set.  */

void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
  basic_block bb, dest;
  edge e, e_copy;
  edge_iterator ei;
  tree phi, phi_copy, phi_next, def;

  bb = get_bb_original (bb_copy);

  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
    {
      /* Nothing to do for destinations without PHI nodes.  */
      if (!phi_nodes (e_copy->dest))
	continue;

      /* Map the copied destination back to its original, if any.  */
      if (e_copy->dest->flags & BB_DUPLICATED)
	dest = get_bb_original (e_copy->dest);
      else
	dest = e_copy->dest;

      e = find_edge (bb, dest);
      if (!e)
	{
	  /* During loop unrolling the target of the latch edge is copied.
	     In this case we are not looking for edge to dest, but to
	     duplicated block whose original was dest.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->dest->flags & BB_DUPLICATED)
		&& get_bb_original (e->dest) == dest)
	      break;

	  gcc_assert (e != NULL);
	}

      /* Walk the original and copied PHI chains in lock step; the
	 duplication code keeps them in the same order.  */
      for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
	   phi;
	   phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
	{
	  phi_next = PHI_CHAIN (phi);
	  def = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  add_phi_arg (phi_copy, def, e_copy);
	}
    }
}
4361
4362 /* Blocks in REGION_COPY array of length N_REGION were created by
4363 duplication of basic blocks. Add phi node arguments for edges
4364 going from these blocks. */
4365
4366 void
4367 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4368 {
4369 unsigned i;
4370
4371 for (i = 0; i < n_region; i++)
4372 region_copy[i]->flags |= BB_DUPLICATED;
4373
4374 for (i = 0; i < n_region; i++)
4375 add_phi_args_after_copy_bb (region_copy[i]);
4376
4377 for (i = 0; i < n_region; i++)
4378 region_copy[i]->flags &= ~BB_DUPLICATED;
4379 }
4380
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
tree_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i, n_doms;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  basic_block *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  loop->copy = loop;

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      loop->copy = loop->outer;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p ());

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = XNEWVEC (basic_block, n_basic_blocks);
  initialize_original_copy_tables ();

  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);

  /* Gather profile data (counts or frequencies) so the two region
     copies can be rescaled after duplication.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  doms[n_doms++] = get_bb_original (entry->dest);
  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
  free (doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
4527
4528 /*
4529 DEF_VEC_P(basic_block);
4530 DEF_VEC_ALLOC_P(basic_block,heap);
4531 */
4532
4533 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4534 adding blocks when the dominator traversal reaches EXIT. This
4535 function silently assumes that ENTRY strictly dominates EXIT. */
4536
4537 static void
4538 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4539 VEC(basic_block,heap) **bbs_p)
4540 {
4541 basic_block son;
4542
4543 for (son = first_dom_son (CDI_DOMINATORS, entry);
4544 son;
4545 son = next_dom_son (CDI_DOMINATORS, son))
4546 {
4547 VEC_safe_push (basic_block, heap, *bbs_p, son);
4548 if (son != exit)
4549 gather_blocks_in_sese_region (son, exit, bbs_p);
4550 }
4551 }
4552
4553
/* Bag of state passed to move_stmt_r when relocating statements from
   one function to another.  */
struct move_stmt_d
{
  tree block;			/* BLOCK to store in TREE_BLOCK of moved expressions.  */
  tree from_context;		/* FUNCTION_DECL statements are moved out of.  */
  tree to_context;		/* FUNCTION_DECL statements are moved into.  */
  bitmap vars_to_remove;	/* DECL_UIDs of locals to drop from the source function.  */
  htab_t new_label_map;		/* Maps old LABEL_DECLs to their replacements.  */
  bool remap_decls_p;		/* True when decl contexts should be rewritten.  */
};
4563
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP and change the DECL_CONTEXT of every local
   variable referenced in *TP.  DATA is a struct move_stmt_d
   describing the move.  Called via walk_tree; always returns
   NULL_TREE so the walk continues.  */

static tree
move_stmt_r (tree *tp, int *walk_subtrees, void *data)
{
  struct move_stmt_d *p = (struct move_stmt_d *) data;
  tree t = *tp;

  if (p->block
      && (EXPR_P (t) || GIMPLE_STMT_P (t)))
    TREE_BLOCK (t) = p->block;

  if (OMP_DIRECTIVE_P (t)
      && TREE_CODE (t) != OMP_RETURN
      && TREE_CODE (t) != OMP_CONTINUE)
    {
      /* Do not remap variables inside OMP directives.  Variables
	 referenced in clauses and directive header belong to the
	 parent function and should not be moved into the child
	 function.  */
      bool save_remap_decls_p = p->remap_decls_p;
      p->remap_decls_p = false;
      *walk_subtrees = 0;

      /* Walk only the directive body, with decl remapping disabled.  */
      walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);

      p->remap_decls_p = save_remap_decls_p;
    }
  else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
    {
      if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels may have been replaced wholesale; look the old
	     label up in the replacement map first.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.from = t;
	      out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  DECL_CONTEXT (t) = p->to_context;

	  if (TREE_CODE (t) == VAR_DECL)
	    {
	      /* Register the variable with the destination function.  */
	      struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
	      f->unexpanded_var_list
		= tree_cons (0, t, f->unexpanded_var_list);

	      /* Mark T to be removed from the original function,
		 otherwise it will be given a DECL_RTL when the
		 original function is expanded.  */
	      bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
	    }
	}
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
4631
4632
4633 /* Move basic block BB from function CFUN to function DEST_FN. The
4634 block is moved out of the original linked list and placed after
4635 block AFTER in the new list. Also, the block is removed from the
4636 original array of blocks and placed in DEST_FN's array of blocks.
4637 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
4638 updated to reflect the moved edges.
4639
4640 On exit, local variables that need to be removed from
4641 CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
4642
4643 static void
4644 move_block_to_fn (struct function *dest_cfun, basic_block bb,
4645 basic_block after, bool update_edge_count_p,
4646 bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
4647 {
4648 struct control_flow_graph *cfg;
4649 edge_iterator ei;
4650 edge e;
4651 block_stmt_iterator si;
4652 struct move_stmt_d d;
4653 unsigned old_len, new_len;
4654
4655 /* Link BB to the new linked list. */
4656 move_block_after (bb, after);
4657
4658 /* Update the edge count in the corresponding flowgraphs. */
4659 if (update_edge_count_p)
4660 FOR_EACH_EDGE (e, ei, bb->succs)
4661 {
4662 cfun->cfg->x_n_edges--;
4663 dest_cfun->cfg->x_n_edges++;
4664 }
4665
4666 /* Remove BB from the original basic block array. */
4667 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
4668 cfun->cfg->x_n_basic_blocks--;
4669
4670 /* Grow DEST_CFUN's basic block array if needed. */
4671 cfg = dest_cfun->cfg;
4672 cfg->x_n_basic_blocks++;
4673 if (bb->index > cfg->x_last_basic_block)
4674 cfg->x_last_basic_block = bb->index;
4675
4676 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
4677 if ((unsigned) cfg->x_last_basic_block >= old_len)
4678 {
4679 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
4680 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
4681 new_len);
4682 }
4683
4684 VEC_replace (basic_block, cfg->x_basic_block_info,
4685 cfg->x_last_basic_block, bb);
4686
4687 /* The statements in BB need to be associated with a new TREE_BLOCK.
4688 Labels need to be associated with a new label-to-block map. */
4689 memset (&d, 0, sizeof (d));
4690 d.vars_to_remove = vars_to_remove;
4691
4692 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4693 {
4694 tree stmt = bsi_stmt (si);
4695 int region;
4696
4697 d.from_context = cfun->decl;
4698 d.to_context = dest_cfun->decl;
4699 d.remap_decls_p = true;
4700 d.new_label_map = new_label_map;
4701 if (TREE_BLOCK (stmt))
4702 d.block = DECL_INITIAL (dest_cfun->decl);
4703
4704 walk_tree (&stmt, move_stmt_r, &d, NULL);
4705
4706 if (TREE_CODE (stmt) == LABEL_EXPR)
4707 {
4708 tree label = LABEL_EXPR_LABEL (stmt);
4709 int uid = LABEL_DECL_UID (label);
4710
4711 gcc_assert (uid > -1);
4712
4713 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
4714 if (old_len <= (unsigned) uid)
4715 {
4716 new_len = 3 * uid / 2;
4717 VEC_safe_grow_cleared (basic_block, gc,
4718 cfg->x_label_to_block_map, new_len);
4719 }
4720
4721 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
4722 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
4723
4724 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
4725
4726 if (uid >= dest_cfun->last_label_uid)
4727 dest_cfun->last_label_uid = uid + 1;
4728 }
4729 else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
4730 TREE_OPERAND (stmt, 0) =
4731 build_int_cst (NULL_TREE,
4732 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
4733 + eh_offset);
4734
4735 region = lookup_stmt_eh_region (stmt);
4736 if (region >= 0)
4737 {
4738 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
4739 remove_stmt_from_eh_region (stmt);
4740 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
4741 gimple_remove_stmt_histograms (cfun, stmt);
4742 }
4743 }
4744 }
4745
4746 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4747 the outermost EH region. Use REGION as the incoming base EH region. */
4748
4749 static int
4750 find_outermost_region_in_block (struct function *src_cfun,
4751 basic_block bb, int region)
4752 {
4753 block_stmt_iterator si;
4754
4755 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4756 {
4757 tree stmt = bsi_stmt (si);
4758 int stmt_region;
4759
4760 if (TREE_CODE (stmt) == RESX_EXPR)
4761 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4762 else
4763 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4764 if (stmt_region > 0)
4765 {
4766 if (region < 0)
4767 region = stmt_region;
4768 else if (stmt_region != region)
4769 {
4770 region = eh_region_outermost (src_cfun, stmt_region, region);
4771 gcc_assert (region != -1);
4772 }
4773 }
4774 }
4775
4776 return region;
4777 }
4778
4779 static tree
4780 new_label_mapper (tree decl, void *data)
4781 {
4782 htab_t hash = (htab_t) data;
4783 struct tree_map *m;
4784 void **slot;
4785
4786 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4787
4788 m = xmalloc (sizeof (struct tree_map));
4789 m->hash = DECL_UID (decl);
4790 m->from = decl;
4791 m->to = create_artificial_label ();
4792 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4793
4794 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4795 gcc_assert (*slot == NULL);
4796
4797 *slot = m;
4798
4799 return m->to;
4800 }
4801
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb)
{
  VEC(basic_block,heap) *bbs;
  basic_block after, bb, *entry_pred, *exit_succ;
  struct function *saved_cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  bitmap vars_to_remove;
  htab_t new_label_map;

  saved_cfun = cfun;

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      /* remove_edge advances the iterator implicitly by shrinking the
	 edge vector, so EI is not stepped here.  */
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  cfun = dest_cfun;

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      /* Find the outermost EH region used anywhere in the moved
	 blocks; everything inside it gets duplicated into DEST_CFUN.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  cfun = saved_cfun;

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_to_remove = BITMAP_ALLOC (NULL);
  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
			new_label_map, eh_offset);
      after = bb;
    }

  if (new_label_map)
    htab_delete (new_label_map);

  /* Remove the variables marked in VARS_TO_REMOVE from
     CFUN->UNEXPANDED_VAR_LIST.  Otherwise, they will be given a
     DECL_RTL in the context of CFUN.  */
  if (!bitmap_empty_p (vars_to_remove))
    {
      tree *p;

      /* Unlink matching list cells in place via a pointer-to-pointer
	 walk.  */
      for (p = &cfun->unexpanded_var_list; *p; )
	{
	  tree var = TREE_VALUE (*p);
	  if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
	    {
	      *p = TREE_CHAIN (*p);
	      continue;
	    }

	  p = &TREE_CHAIN (*p);
	}
    }

  BITMAP_FREE (vars_to_remove);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  cfun = dest_cfun;
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  cfun = saved_cfun;

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  for (i = 0; i < num_entry_edges; i++)
    make_edge (entry_pred[i], bb, entry_flag[i]);

  for (i = 0; i < num_exit_edges; i++)
    make_edge (bb, exit_succ[i], exit_flag[i]);

  if (exit_bb)
    {
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_flag);
  free (entry_pred);
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
4985
4986
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).
   Temporarily repoints the global CFUN at FN's struct function and
   restores it before returning.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  struct function *saved_cfun;

  /* Print the signature: name followed by the argument list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, arg, dump_flags);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_DETAILS)
    dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
  if (flags & TDF_RAW)
    {
      /* Raw dumps bypass the pretty-printer entirely.  */
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  saved_cfun = cfun;
  cfun = DECL_STRUCT_FUNCTION (fn);

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* Make a CFG based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	dump_generic_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The body replaces the topmost bind; indent it as if
		 the bind's braces were still printed.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  cfun = saved_cfun;
}
5093
5094
5095 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
5096
5097 void
5098 debug_function (tree fn, int flags)
5099 {
5100 dump_function_to_file (fn, stderr, flags);
5101 }
5102
5103
5104 /* Pretty print of the loops intermediate representation. */
5105 static void print_loop (FILE *, struct loop *, int);
5106 static void print_pred_bbs (FILE *, basic_block bb);
5107 static void print_succ_bbs (FILE *, basic_block bb);
5108
5109
5110 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5111
5112 static void
5113 print_pred_bbs (FILE *file, basic_block bb)
5114 {
5115 edge e;
5116 edge_iterator ei;
5117
5118 FOR_EACH_EDGE (e, ei, bb->preds)
5119 fprintf (file, "bb_%d ", e->src->index);
5120 }
5121
5122
5123 /* Print on FILE the indexes for the successors of basic_block BB. */
5124
5125 static void
5126 print_succ_bbs (FILE *file, basic_block bb)
5127 {
5128 edge e;
5129 edge_iterator ei;
5130
5131 FOR_EACH_EDGE (e, ei, bb->succs)
5132 fprintf (file, "bb_%d ", e->dest->index);
5133 }
5134
5135
5136 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5137
5138 static void
5139 print_loop (FILE *file, struct loop *loop, int indent)
5140 {
5141 char *s_indent;
5142 basic_block bb;
5143
5144 if (loop == NULL)
5145 return;
5146
5147 s_indent = (char *) alloca ((size_t) indent + 1);
5148 memset ((void *) s_indent, ' ', (size_t) indent);
5149 s_indent[indent] = '\0';
5150
5151 /* Print the loop's header. */
5152 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5153
5154 /* Print the loop's body. */
5155 fprintf (file, "%s{\n", s_indent);
5156 FOR_EACH_BB (bb)
5157 if (bb->loop_father == loop)
5158 {
5159 /* Print the basic_block's header. */
5160 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5161 print_pred_bbs (file, bb);
5162 fprintf (file, "}, succs = {");
5163 print_succ_bbs (file, bb);
5164 fprintf (file, "})\n");
5165
5166 /* Print the basic_block's body. */
5167 fprintf (file, "%s {\n", s_indent);
5168 tree_dump_bb (bb, file, indent + 4);
5169 fprintf (file, "%s }\n", s_indent);
5170 }
5171
5172 print_loop (file, loop->inner, indent + 2);
5173 fprintf (file, "%s}\n", s_indent);
5174 print_loop (file, loop->next, indent);
5175 }
5176
5177
5178 /* Follow a CFG edge from the entry point of the program, and on entry
5179 of a loop, pretty print the loop structure on FILE. */
5180
5181 void
5182 print_loop_ir (FILE *file)
5183 {
5184 basic_block bb;
5185
5186 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5187 if (bb && bb->loop_father)
5188 print_loop (file, bb->loop_father, 0);
5189 }
5190
5191
/* Debugging loops structure at tree level.  */

void
debug_loop_ir (void)
{
  /* Route the loop dump to the standard error stream.  */
  FILE *out = stderr;
  print_loop_ir (out);
}
5199
5200
5201 /* Return true if BB ends with a call, possibly followed by some
5202 instructions that must stay with the call. Return false,
5203 otherwise. */
5204
5205 static bool
5206 tree_block_ends_with_call_p (basic_block bb)
5207 {
5208 block_stmt_iterator bsi = bsi_last (bb);
5209 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5210 }
5211
5212
5213 /* Return true if BB ends with a conditional branch. Return false,
5214 otherwise. */
5215
5216 static bool
5217 tree_block_ends_with_condjump_p (basic_block bb)
5218 {
5219 tree stmt = last_stmt (bb);
5220 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5221 }
5222
5223
5224 /* Return true if we need to add fake edge to exit at statement T.
5225 Helper function for tree_flow_call_edges_add. */
5226
5227 static bool
5228 need_fake_edge_p (tree t)
5229 {
5230 tree call;
5231
5232 /* NORETURN and LONGJMP calls already have an edge to exit.
5233 CONST and PURE calls do not need one.
5234 We don't currently check for CONST and PURE here, although
5235 it would be a good idea, because those attributes are
5236 figured out from the RTL in mark_constant_function, and
5237 the counter incrementation code from -fprofile-arcs
5238 leads to different results from -fbranch-probabilities. */
5239 call = get_call_expr_in (t);
5240 if (call
5241 && !(call_expr_flags (call) & ECF_NORETURN))
5242 return true;
5243
5244 if (TREE_CODE (t) == ASM_EXPR
5245 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5246 return true;
5247
5248 return false;
5249 }
5250
5251
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
tree_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do when only the fixed ENTRY/EXIT blocks exist.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      block_stmt_iterator bsi = bsi_last (bb);
      tree t = NULL_TREE;
      if (!bsi_end_p (bsi))
	t = bsi_stmt (bsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* The empty statement forces a block split on the edge,
		 separating the call from the fallthru to EXIT.  */
	      bsi_insert_on_edge (e, build_empty_stmt ());
	      bsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      block_stmt_iterator bsi;
      tree stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      bsi = bsi_last (bb);
      if (!bsi_end_p (bsi))
	{
	  last_stmt = bsi_stmt (bsi);
	  /* Walk the block backwards so splits below the current
	     statement do not disturb the iteration.  */
	  do
	    {
	      stmt = bsi_stmt (bsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;
		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      bsi_prev (&bsi);
	    }
	  while (!bsi_end_p (bsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
5368
/* Purge dead abnormal call edges from basic block BB.  Returns true
   if any edge was removed, including EH edges removed by the call to
   tree_purge_dead_eh_edges.  */

bool
tree_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = tree_purge_dead_eh_edges (bb);

  if (current_function_has_nonlocal_label)
    {
      tree stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If BB's last statement can still perform an abnormal goto,
	 its abnormal successor edges are live; otherwise drop them
	 all.  remove_edge shrinks the edge vector, which is why EI is
	 only advanced on the else path.  */
      if (!(stmt && tree_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See tree_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
5401
/* Purge dead EH edges from basic block BB.  Returns true if any edge
   was removed.  Does nothing when BB's last statement can still throw
   internally.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  /* If the last statement can still throw, all EH edges are live.  */
  if (stmt && tree_can_throw_internal (stmt))
    return false;

  /* remove_edge shrinks the edge vector, so EI is only advanced on
     the else path.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not
     just immediate successors.  E.g. when bb1 is changed so that
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
     eh edges purged by this function in:
	   0
	  / \
	 v   v
	 1-->2
	/ \  |
       v   v |
       3-->4 |
	    \ v
	     ->5

     idom(bb5) must be recomputed.  For now just free the dominance
     info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}
5448
5449 bool
5450 tree_purge_all_dead_eh_edges (bitmap blocks)
5451 {
5452 bool changed = false;
5453 unsigned i;
5454 bitmap_iterator bi;
5455
5456 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5457 {
5458 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5459 }
5460
5461 return changed;
5462 }
5463
5464 /* This function is called whenever a new edge is created or
5465 redirected. */
5466
5467 static void
5468 tree_execute_on_growing_pred (edge e)
5469 {
5470 basic_block bb = e->dest;
5471
5472 if (phi_nodes (bb))
5473 reserve_phi_args_for_new_edge (bb);
5474 }
5475
5476 /* This function is called immediately before edge E is removed from
5477 the edge vector E->dest->preds. */
5478
5479 static void
5480 tree_execute_on_shrinking_pred (edge e)
5481 {
5482 if (phi_nodes (e->dest))
5483 remove_phi_args (e);
5484 }
5485
5486 /*---------------------------------------------------------------------------
5487 Helper functions for Loop versioning
5488 ---------------------------------------------------------------------------*/
5489
5490 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5491 of 'first'. Both of them are dominated by 'new_head' basic block. When
5492 'new_head' was created by 'second's incoming edge it received phi arguments
5493 on the edge by split_edge(). Later, additional edge 'e' was created to
5494 connect 'new_head' and 'first'. Now this routine adds phi args on this
5495 additional edge 'e' that new_head to second edge received as part of edge
5496 splitting.
5497 */
5498
5499 static void
5500 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5501 basic_block new_head, edge e)
5502 {
5503 tree phi1, phi2;
5504 edge e2 = find_edge (new_head, second);
5505
5506 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5507 edge, we should always have an edge from NEW_HEAD to SECOND. */
5508 gcc_assert (e2 != NULL);
5509
5510 /* Browse all 'second' basic block phi nodes and add phi args to
5511 edge 'e' for 'first' head. PHI args are always in correct order. */
5512
5513 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5514 phi2 && phi1;
5515 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5516 {
5517 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5518 add_phi_arg (phi1, def, e);
5519 }
5520 }
5521
5522 /* Adds a if else statement to COND_BB with condition COND_EXPR.
5523 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5524 the destination of the ELSE part. */
5525 static void
5526 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5527 basic_block cond_bb, void *cond_e)
5528 {
5529 block_stmt_iterator bsi;
5530 tree goto1 = NULL_TREE;
5531 tree goto2 = NULL_TREE;
5532 tree new_cond_expr = NULL_TREE;
5533 tree cond_expr = (tree) cond_e;
5534 edge e0;
5535
5536 /* Build new conditional expr */
5537 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5538 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5539 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5540
5541 /* Add new cond in cond_bb. */
5542 bsi = bsi_start (cond_bb);
5543 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5544 /* Adjust edges appropriately to connect new head with first head
5545 as well as second head. */
5546 e0 = single_succ_edge (cond_bb);
5547 e0->flags &= ~EDGE_FALLTHRU;
5548 e0->flags |= EDGE_FALSE_VALUE;
5549 }
5550
/* CFG hook table used when the IL is GIMPLE trees; see cfghooks.h for
   the contract of each callback.  */

struct cfg_hooks tree_cfg_hooks = {
  "tree",			/* name */
  tree_verify_flow_info,	/* verify_flow_info */
  tree_dump_bb,			/* dump_bb */
  create_bb,			/* create_basic_block */
  tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
  tree_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  tree_split_block,		/* split_block */
  tree_move_block_after,	/* move_block_after */
  tree_can_merge_blocks_p,	/* can_merge_blocks_p */
  tree_merge_blocks,		/* merge_blocks */
  tree_predict_edge,		/* predict_edge */
  tree_predicted_by_p,		/* predicted_by_p */
  tree_can_duplicate_bb_p,	/* can_duplicate_block_p */
  tree_duplicate_bb,		/* duplicate_block */
  tree_split_edge,		/* split_edge */
  tree_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  tree_block_ends_with_call_p,	/* block_ends_with_call_p */
  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  tree_flow_call_edges_add,	/* flow_call_edges_add */
  tree_execute_on_growing_pred,	/* execute_on_growing_pred */
  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  tree_lv_add_condition_to_bb,	/* lv_add_condition_to_bb */
  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
5582
5583
5584 /* Split all critical edges. */
5585
5586 static unsigned int
5587 split_critical_edges (void)
5588 {
5589 basic_block bb;
5590 edge e;
5591 edge_iterator ei;
5592
5593 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5594 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5595 mappings around the calls to split_edge. */
5596 start_recording_case_labels ();
5597 FOR_ALL_BB (bb)
5598 {
5599 FOR_EACH_EDGE (e, ei, bb->succs)
5600 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5601 {
5602 split_edge (e);
5603 }
5604 }
5605 end_recording_case_labels ();
5606 return 0;
5607 }
5608
/* Pass descriptor: split every critical, non-abnormal edge in the CFG
   (see split_critical_edges above).  */

struct tree_opt_pass pass_split_crit_edges =
{
  "crited",			/* name */
  NULL,				/* gate */
  split_critical_edges,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_SPLIT_EDGES,		/* tv_id */
  PROP_cfg,			/* properties_required */
  PROP_no_crit_edges,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_dump_func,		/* todo_flags_finish */
  0				/* letter */
};
5625
5626 \f
5627 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5628 a temporary, make sure and register it to be renamed if necessary,
5629 and finally return the temporary. Put the statements to compute
5630 EXP before the current statement in BSI. */
5631
5632 tree
5633 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5634 {
5635 tree t, new_stmt, orig_stmt;
5636
5637 if (is_gimple_val (exp))
5638 return exp;
5639
5640 t = make_rename_temp (type, NULL);
5641 new_stmt = build2_gimple (GIMPLE_MODIFY_STMT, t, exp);
5642
5643 orig_stmt = bsi_stmt (*bsi);
5644 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5645 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5646
5647 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5648 if (gimple_in_ssa_p (cfun))
5649 mark_symbols_for_renaming (new_stmt);
5650
5651 return t;
5652 }
5653
5654 /* Build a ternary operation and gimplify it. Emit code before BSI.
5655 Return the gimple_val holding the result. */
5656
5657 tree
5658 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5659 tree type, tree a, tree b, tree c)
5660 {
5661 tree ret;
5662
5663 ret = fold_build3 (code, type, a, b, c);
5664 STRIP_NOPS (ret);
5665
5666 return gimplify_val (bsi, type, ret);
5667 }
5668
5669 /* Build a binary operation and gimplify it. Emit code before BSI.
5670 Return the gimple_val holding the result. */
5671
5672 tree
5673 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5674 tree type, tree a, tree b)
5675 {
5676 tree ret;
5677
5678 ret = fold_build2 (code, type, a, b);
5679 STRIP_NOPS (ret);
5680
5681 return gimplify_val (bsi, type, ret);
5682 }
5683
5684 /* Build a unary operation and gimplify it. Emit code before BSI.
5685 Return the gimple_val holding the result. */
5686
5687 tree
5688 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5689 tree a)
5690 {
5691 tree ret;
5692
5693 ret = fold_build1 (code, type, a);
5694 STRIP_NOPS (ret);
5695
5696 return gimplify_val (bsi, type, ret);
5697 }
5698
5699
5700 \f
/* Emit return warnings.

   Two complementary diagnostics, keyed off the CFG:

   - A function marked noreturn (TREE_THIS_VOLATILE on the decl) whose
     exit block has predecessors can in fact return: warn.

   - A non-void function containing a plain "return;" that reaches the
     exit block falls off the end without a value: warn (-Wreturn-type).

   Each warning path exists in two forms depending on whether the tree
   location representation is mapped (source_location) or a pointer
   (location_t *); the two #ifdef arms must be kept in sync.  */

static unsigned int
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      /* Prefer the location of an explicit RETURN_EXPR among the exit
	 predecessors; fall back to the end of the function below.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
	      && (locus = EXPR_LOCUS (last)) != NULL)
#endif
	    break;
	}
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning (0, "%H%<noreturn%> function does return", &location);
#else
      if (!locus)
	locus = &cfun->function_end_locus;
      warning (0, "%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  tree last = last_stmt (e->src);
	  /* Only a value-less return that has not already been flagged
	     triggers the warning; warn once per function.  */
	  if (TREE_CODE (last) == RETURN_EXPR
	      && TREE_OPERAND (last, 0) == NULL
	      && !TREE_NO_WARNING (last))
	    {
#ifdef USE_MAPPED_LOCATION
	      location = EXPR_LOCATION (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning (0, "%Hcontrol reaches end of non-void function", &location);
#else
	      locus = EXPR_LOCUS (last);
	      if (!locus)
		locus = &cfun->function_end_locus;
	      warning (0, "%Hcontrol reaches end of non-void function", locus);
#endif
	      /* Suppress duplicate diagnostics for this function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
5778
5779
5780 /* Given a basic block B which ends with a conditional and has
5781 precisely two successors, determine which of the edges is taken if
5782 the conditional is true and which is taken if the conditional is
5783 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5784
5785 void
5786 extract_true_false_edges_from_block (basic_block b,
5787 edge *true_edge,
5788 edge *false_edge)
5789 {
5790 edge e = EDGE_SUCC (b, 0);
5791
5792 if (e->flags & EDGE_TRUE_VALUE)
5793 {
5794 *true_edge = e;
5795 *false_edge = EDGE_SUCC (b, 1);
5796 }
5797 else
5798 {
5799 *false_edge = e;
5800 *true_edge = EDGE_SUCC (b, 1);
5801 }
5802 }
5803
/* Pass descriptor for the return-warning pass.  Anonymous (no name,
   no gate): it always runs once a CFG is available.  */

struct tree_opt_pass pass_warn_function_return =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_return,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};
5820
5821 /* Emit noreturn warnings. */
5822
5823 static unsigned int
5824 execute_warn_function_noreturn (void)
5825 {
5826 if (warn_missing_noreturn
5827 && !TREE_THIS_VOLATILE (cfun->decl)
5828 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5829 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5830 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5831 "for attribute %<noreturn%>",
5832 cfun->decl);
5833 return 0;
5834 }
5835
/* Pass descriptor for the missing-noreturn suggestion pass.  Anonymous
   (no name, no gate); the -Wmissing-noreturn check lives in execute.  */

struct tree_opt_pass pass_warn_function_noreturn =
{
  NULL,					/* name */
  NULL,					/* gate */
  execute_warn_function_noreturn,	/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_cfg,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0,					/* todo_flags_finish */
  0					/* letter */
};