ab5a234b3bccb780d02406eb99ed22ffec7f4bbd
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006
3 Free Software Foundation, Inc.
4 Contributed by Diego Novillo <dnovillo@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "hard-reg-set.h"
31 #include "basic-block.h"
32 #include "output.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "ggc.h"
37 #include "langhooks.h"
38 #include "diagnostic.h"
39 #include "tree-flow.h"
40 #include "timevar.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "toplev.h"
44 #include "except.h"
45 #include "cfgloop.h"
46 #include "cfglayout.h"
47 #include "hashtab.h"
48 #include "tree-ssa-propagate.h"
49
50 /* This file contains functions for building the Control Flow Graph (CFG)
51 for a function tree. */
52
53 /* Local declarations. */
54
55 /* Initial capacity for the basic block array. */
56 static const int initial_cfg_capacity = 20;
57
58 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
59 which use a particular edge. The CASE_LABEL_EXPRs are chained together
60 via their TREE_CHAIN field, which we clear after we're done with the
61 hash table to prevent problems with duplication of SWITCH_EXPRs.
62
63 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
64 update the case vector in response to edge redirections.
65
66 Right now this table is set up and torn down at key points in the
67 compilation process. It would be nice if we could make the table
68 more persistent. The key is getting notification of changes to
69 the CFG (particularly edge removal, creation and redirection). */
70
struct edge_to_cases_elt
{
  /* The edge itself.  Necessary for hashing and equality tests.  */
  edge e;

  /* The case labels associated with this edge.  We link these up via
     their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
     when we destroy the hash table.  This prevents problems when copying
     SWITCH_EXPRs.  */
  tree case_labels;
};

/* Map from edge to the chain of CASE_LABEL_EXPRs using that edge.
   NULL unless we are between start_recording_case_labels and
   end_recording_case_labels.  */
static htab_t edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;
95
96 /* Basic blocks and flowgraphs. */
97 static basic_block create_bb (void *, void *, basic_block);
98 static void make_blocks (tree);
99 static void factor_computed_gotos (void);
100
101 /* Edges. */
102 static void make_edges (void);
103 static void make_cond_expr_edges (basic_block);
104 static void make_switch_expr_edges (basic_block);
105 static void make_goto_expr_edges (basic_block);
106 static edge tree_redirect_edge_and_branch (edge, basic_block);
107 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
108 static unsigned int split_critical_edges (void);
109
110 /* Various helpers. */
111 static inline bool stmt_starts_bb_p (tree, tree);
112 static int tree_verify_flow_info (void);
113 static void tree_make_forwarder_block (edge);
114 static void tree_cfg2vcg (FILE *);
115 static inline void change_bb_for_stmt (tree t, basic_block bb);
116
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
125
/* Reset the CFG of the current function to an empty flowgraph that
   contains only the two fixed blocks, with ENTRY chained directly to
   EXIT.  Also allocates the basic-block and label-to-block maps.  */

void
init_empty_tree_cfg (void)
{
  /* Initialize the basic block array.  */
  init_flow ();
  profile_status = PROFILE_ABSENT;
  n_basic_blocks = NUM_FIXED_BLOCKS;
  last_basic_block = NUM_FIXED_BLOCKS;
  basic_block_info = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow (basic_block, gc, basic_block_info, initial_cfg_capacity);
  /* VEC_safe_grow does not clear the newly added slots, so zero them
     explicitly.  */
  memset (VEC_address (basic_block, basic_block_info), 0,
	  sizeof (basic_block) * initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow (basic_block, gc, label_to_block_map, initial_cfg_capacity);
  memset (VEC_address (basic_block, label_to_block_map),
	  0, sizeof (basic_block) * initial_cfg_capacity);

  /* Install the fixed ENTRY and EXIT blocks and link them together so
     the block chain is ENTRY -> EXIT.  */
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
}
150
151 /*---------------------------------------------------------------------------
152 Create basic blocks
153 ---------------------------------------------------------------------------*/
154
/* Entry point to the CFG builder for trees.  TP points to the list of
   statements to be added to the flowgraph.  */

static void
build_tree_cfg (tree *tp)
{
  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  /* make_blocks sets this flag when it encounters a computed goto.  */
  found_computed_goto = 0;
  make_blocks (*tp);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    {
      size_t old_size = VEC_length (basic_block, basic_block_info);
      basic_block *p;
      VEC_safe_grow (basic_block, gc, basic_block_info, n_basic_blocks);
      p = VEC_address (basic_block, basic_block_info);
      /* VEC_safe_grow leaves the new elements uninitialized; clear them.  */
      memset (&p[old_size], 0,
	      sizeof (basic_block) * (n_basic_blocks - old_size));
    }

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
	tree_cfg2vcg (vcg_file);
	dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    dump_tree_cfg (dump_file, dump_flags);
}
226
227 static unsigned int
228 execute_build_cfg (void)
229 {
230 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
231 return 0;
232 }
233
/* Pass descriptor for CFG construction.  Requires lowered GIMPLE with
   EH regions (PROP_gimple_leh) and provides PROP_cfg; statement
   verification runs afterwards via TODO_verify_stmts.  */

struct tree_opt_pass pass_build_cfg =
{
  "cfg",				/* name */
  NULL,					/* gate */
  execute_build_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CFG,				/* tv_id */
  PROP_gimple_leh,			/* properties_required */
  PROP_cfg,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_stmts,			/* todo_flags_finish */
  0					/* letter */
};
250
/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  /* Shared temporary holding the destination of every factored goto.  */
  tree var = NULL;
  tree factored_computed_goto_label = NULL;
  /* The single computed goto all others are redirected to.  Doubles as
     a flag: non-NULL once the factored block has been created.  */
  tree factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi = bsi_last (bb);
      tree last;

      if (bsi_end_p (bsi))
	continue;
      last = bsi_stmt (bsi);

      /* Ignore the computed goto we create when we factor the original
	 computed gotos.  */
      if (last == factored_computed_goto)
	continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
	{
	  tree assignment;

	  /* The first time we find a computed goto we need to create
	     the factored goto block and the variable each original
	     computed goto will use for their goto destination.  */
	  if (! factored_computed_goto)
	    {
	      basic_block new_bb = create_empty_bb (bb);
	      block_stmt_iterator new_bsi = bsi_start (new_bb);

	      /* Create the destination of the factored goto.  Each original
		 computed goto will put its desired destination into this
		 variable and jump to the label we create immediately
		 below.  */
	      var = create_tmp_var (ptr_type_node, "gotovar");

	      /* Build a label for the new block which will contain the
		 factored computed goto.  */
	      factored_label_decl = create_artificial_label ();
	      factored_computed_goto_label
		= build1 (LABEL_EXPR, void_type_node, factored_label_decl);
	      bsi_insert_after (&new_bsi, factored_computed_goto_label,
				BSI_NEW_STMT);

	      /* Build our new computed goto.  */
	      factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
	      bsi_insert_after (&new_bsi, factored_computed_goto,
				BSI_NEW_STMT);
	    }

	  /* Copy the original computed goto's destination into VAR.  */
	  assignment = build2 (MODIFY_EXPR, ptr_type_node,
			       var, GOTO_DESTINATION (last));
	  bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);

	  /* And re-vector the computed goto to the new destination.  */
	  GOTO_DESTINATION (last) = factored_label_decl;
	}
    }
}
326
327
/* Build a flowgraph for the statement_list STMT_LIST.  Partitions the
   list into basic blocks: a new block starts at each label (or other
   block-starting statement) and after each control-flow statement.  */

static void
make_blocks (tree stmt_list)
{
  tree_stmt_iterator i = tsi_start (stmt_list);
  tree stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_list = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!tsi_end_p (i))
    {
      tree prev_stmt;

      prev_stmt = stmt;
      stmt = tsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  /* Split the list before STMT (unless STMT is the very first
	     statement); the tail starting at STMT becomes the body of
	     the new block.  */
	  if (!first_stmt_of_list)
	    stmt_list = tsi_split_statement_list_before (&i);
	  bb = create_basic_block (stmt_list, NULL, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      set_bb_for_stmt (stmt, bb);

      /* Remember computed gotos so build_tree_cfg can factor them.  */
      if (computed_goto_p (stmt))
	found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	start_new_block = true;

      tsi_next (&i);
      first_stmt_of_list = false;
    }
}
373
374
/* Create and return a new empty basic block after bb AFTER.  H, if
   non-NULL, is the statement list to use as the block's body; E is the
   edge argument required by the CFG-hooks signature and must be NULL
   for trees.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->stmt_list = h ? (tree) h : alloc_stmt_list ();

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  Grows by roughly 25% to
     amortize the reallocation cost.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t old_size = VEC_length (basic_block, basic_block_info);
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      basic_block *p;
      VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
      p = VEC_address (basic_block, basic_block_info);
      /* Zero the uninitialized tail added by VEC_safe_grow.  */
      memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}
415
416
417 /*---------------------------------------------------------------------------
418 Edge creation
419 ---------------------------------------------------------------------------*/
420
421 /* Fold COND_EXPR_COND of each COND_EXPR. */
422
423 void
424 fold_cond_expr_cond (void)
425 {
426 basic_block bb;
427
428 FOR_EACH_BB (bb)
429 {
430 tree stmt = last_stmt (bb);
431
432 if (stmt
433 && TREE_CODE (stmt) == COND_EXPR)
434 {
435 tree cond = fold (COND_EXPR_COND (stmt));
436 if (integer_zerop (cond))
437 COND_EXPR_COND (stmt) = boolean_false_node;
438 else if (integer_onep (cond))
439 COND_EXPR_COND (stmt) = boolean_true_node;
440 }
441 }
442 }
443
/* Join all the blocks in the flowgraph.  Walks every basic block,
   dispatches on its last statement to create the outgoing edges, and
   threads OMP region state (CUR_REGION) through the walk so that OMP
   directive blocks are wired up correctly.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      tree last = last_stmt (bb);
      bool fallthru;

      if (last)
	{
	  enum tree_code code = TREE_CODE (last);
	  switch (code)
	    {
	    case GOTO_EXPR:
	      make_goto_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RETURN_EXPR:
	      make_edge (bb, EXIT_BLOCK_PTR, 0);
	      fallthru = false;
	      break;
	    case COND_EXPR:
	      make_cond_expr_edges (bb);
	      fallthru = false;
	      break;
	    case SWITCH_EXPR:
	      make_switch_expr_edges (bb);
	      fallthru = false;
	      break;
	    case RESX_EXPR:
	      make_eh_edges (last);
	      fallthru = false;
	      break;

	    case CALL_EXPR:
	      /* If this function receives a nonlocal goto, then we need to
		 make edges from this call site to all the nonlocal goto
		 handlers.  */
	      if (tree_can_make_abnormal_goto (last))
		make_abnormal_goto_edges (bb, true);

	      /* If this statement has reachable exception handlers, then
		 create abnormal edges to them.  */
	      make_eh_edges (last);

	      /* Some calls are known not to return.  */
	      fallthru = !(call_expr_flags (last) & ECF_NORETURN);
	      break;

	    case MODIFY_EXPR:
	      if (is_ctrl_altering_stmt (last))
		{
		  /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the
		     CALL_EXPR may have an abnormal edge.  Search the RHS for
		     this case and create any required edges.  */
		  if (tree_can_make_abnormal_goto (last))
		    make_abnormal_goto_edges (bb, true);

		  make_eh_edges (last);
		}
	      fallthru = true;
	      break;

	    case OMP_PARALLEL:
	    case OMP_FOR:
	    case OMP_SINGLE:
	    case OMP_MASTER:
	    case OMP_ORDERED:
	    case OMP_CRITICAL:
	    case OMP_SECTION:
	      /* Entering an OMP construct: push a new region onto the
		 region tree.  */
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = true;
	      break;

	    case OMP_SECTIONS:
	      cur_region = new_omp_region (bb, code, cur_region);
	      fallthru = false;
	      break;

	    case OMP_RETURN:
	      /* In the case of an OMP_SECTION, the edge will go somewhere
		 other than the next block.  This will be created later.  */
	      cur_region->exit = bb;
	      fallthru = cur_region->type != OMP_SECTION;
	      /* Pop back to the enclosing region.  */
	      cur_region = cur_region->outer;
	      break;

	    case OMP_CONTINUE:
	      cur_region->cont = bb;
	      switch (cur_region->type)
		{
		case OMP_FOR:
		  /* ??? Technically there should be a some sort of loopback
		     edge here, but it goes to a block that doesn't exist yet,
		     and without it, updating the ssa form would be a real
		     bear.  Fortunately, we don't yet do ssa before expanding
		     these nodes.  */
		  break;

		case OMP_SECTIONS:
		  /* Wire up the edges into and out of the nested sections.  */
		  /* ??? Similarly wrt loopback.  */
		  {
		    struct omp_region *i;
		    for (i = cur_region->inner; i ; i = i->next)
		      {
			gcc_assert (i->type == OMP_SECTION);
			make_edge (cur_region->entry, i->entry, 0);
			make_edge (i->exit, bb, EDGE_FALLTHRU);
		      }
		  }
		  break;

		default:
		  gcc_unreachable ();
		}
	      fallthru = true;
	      break;

	    default:
	      /* Any other statement must not end a basic block.  */
	      gcc_assert (!stmt_ends_bb_p (last));
	      fallthru = true;
	    }
	}
      else
	fallthru = true;

      if (fallthru)
	make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();

  /* Clean up the graph and warn for unreachable code.  */
  cleanup_tree_cfg ();
}
594
595
596 /* Create the edges for a COND_EXPR starting at block BB.
597 At this point, both clauses must contain only simple gotos. */
598
599 static void
600 make_cond_expr_edges (basic_block bb)
601 {
602 tree entry = last_stmt (bb);
603 basic_block then_bb, else_bb;
604 tree then_label, else_label;
605 edge e;
606
607 gcc_assert (entry);
608 gcc_assert (TREE_CODE (entry) == COND_EXPR);
609
610 /* Entry basic blocks for each component. */
611 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
612 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
613 then_bb = label_to_block (then_label);
614 else_bb = label_to_block (else_label);
615
616 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
617 #ifdef USE_MAPPED_LOCATION
618 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
619 #else
620 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
621 #endif
622 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
623 if (e)
624 {
625 #ifdef USE_MAPPED_LOCATION
626 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
627 #else
628 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
629 #endif
630 }
631 }
632
633 /* Hashing routine for EDGE_TO_CASES. */
634
635 static hashval_t
636 edge_to_cases_hash (const void *p)
637 {
638 edge e = ((struct edge_to_cases_elt *)p)->e;
639
640 /* Hash on the edge itself (which is a pointer). */
641 return htab_hash_pointer (e);
642 }
643
644 /* Equality routine for EDGE_TO_CASES, edges are unique, so testing
645 for equality is just a pointer comparison. */
646
647 static int
648 edge_to_cases_eq (const void *p1, const void *p2)
649 {
650 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
651 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
652
653 return e1 == e2;
654 }
655
656 /* Called for each element in the hash table (P) as we delete the
657 edge to cases hash table.
658
659 Clear all the TREE_CHAINs to prevent problems with copying of
660 SWITCH_EXPRs and structure sharing rules, then free the hash table
661 element. */
662
663 static void
664 edge_to_cases_cleanup (void *p)
665 {
666 struct edge_to_cases_elt *elt = (struct edge_to_cases_elt *) p;
667 tree t, next;
668
669 for (t = elt->case_labels; t; t = next)
670 {
671 next = TREE_CHAIN (t);
672 TREE_CHAIN (t) = NULL;
673 }
674 free (p);
675 }
676
677 /* Start recording information mapping edges to case labels. */
678
679 void
680 start_recording_case_labels (void)
681 {
682 gcc_assert (edge_to_cases == NULL);
683
684 edge_to_cases = htab_create (37,
685 edge_to_cases_hash,
686 edge_to_cases_eq,
687 edge_to_cases_cleanup);
688 }
689
690 /* Return nonzero if we are recording information for case labels. */
691
692 static bool
693 recording_case_labels_p (void)
694 {
695 return (edge_to_cases != NULL);
696 }
697
698 /* Stop recording information mapping edges to case labels and
699 remove any information we have recorded. */
700 void
701 end_recording_case_labels (void)
702 {
703 htab_delete (edge_to_cases);
704 edge_to_cases = NULL;
705 }
706
/* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E.
   The first label recorded for E owns the hash table element; later
   labels are pushed onto the front of the element's TREE_CHAIN list.  */

static void
record_switch_edge (edge e, tree case_label)
{
  struct edge_to_cases_elt *elt;
  void **slot;

  /* Build a hash table element so we can see if E is already
     in the table.  */
  elt = XNEW (struct edge_to_cases_elt);
  elt->e = e;
  elt->case_labels = case_label;

  slot = htab_find_slot (edge_to_cases, elt, INSERT);

  if (*slot == NULL)
    {
      /* E was not in the hash table.  Install E into the hash table.
	 The table now owns ELT; it is freed by edge_to_cases_cleanup.  */
      *slot = (void *)elt;
    }
  else
    {
      /* E was already in the hash table.  Free ELT as we do not need it
	 anymore.  */
      free (elt);

      /* Get the entry stored in the hash table.  */
      elt = (struct edge_to_cases_elt *) *slot;

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E.  */
      TREE_CHAIN (case_label) = elt->case_labels;
      elt->case_labels = case_label;
    }
}
742
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, tree t)
{
  struct edge_to_cases_elt elt, *elt_p;
  void **slot;
  size_t i, n;
  tree vec;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  /* The table is populated lazily: on a miss we record all of T's
     edges below and jump back here, so the second lookup must hit.  */
restart:
  elt.e = e;
  elt.case_labels = NULL;
  slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);

  if (slot)
    {
      elt_p = (struct edge_to_cases_elt *)*slot;
      return elt_p->case_labels;
    }

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  vec = SWITCH_LABELS (t);
  n = TREE_VEC_LENGTH (vec);
  for (i = 0; i < n; i++)
    {
      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
      basic_block label_bb = label_to_block (lab);
      record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
    }
  goto restart;
}
786
787 /* Create the edges for a SWITCH_EXPR starting at block BB.
788 At this point, the switch body has been lowered and the
789 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
790
791 static void
792 make_switch_expr_edges (basic_block bb)
793 {
794 tree entry = last_stmt (bb);
795 size_t i, n;
796 tree vec;
797
798 vec = SWITCH_LABELS (entry);
799 n = TREE_VEC_LENGTH (vec);
800
801 for (i = 0; i < n; ++i)
802 {
803 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
804 basic_block label_bb = label_to_block (lab);
805 make_edge (bb, label_bb, 0);
806 }
807 }
808
809
/* Return the basic block holding label DEST in function IFUN, or NULL
   if the label is not in the map.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      block_stmt_iterator bsi =
	bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      tree stmt;

      stmt = build1 (LABEL_EXPR, void_type_node, dest);
      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
      /* Inserting the label assigned DEST a UID; re-read it.  */
      uid = LABEL_DECL_UID (dest);
    }
  /* Labels added after the map was last grown have no entry yet.  */
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}
835
836 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
837 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
838
839 void
840 make_abnormal_goto_edges (basic_block bb, bool for_call)
841 {
842 basic_block target_bb;
843 block_stmt_iterator bsi;
844
845 FOR_EACH_BB (target_bb)
846 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
847 {
848 tree target = bsi_stmt (bsi);
849
850 if (TREE_CODE (target) != LABEL_EXPR)
851 break;
852
853 target = LABEL_EXPR_LABEL (target);
854
855 /* Make an edge to every label block that has been marked as a
856 potential target for a computed goto or a non-local goto. */
857 if ((FORCED_LABEL (target) && !for_call)
858 || (DECL_NONLOCAL (target) && for_call))
859 {
860 make_edge (bb, target_bb, EDGE_ABNORMAL);
861 break;
862 }
863 }
864 }
865
/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  block_stmt_iterator last = bsi_last (bb);
  tree goto_t = bsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = GOTO_DESTINATION (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
#ifdef USE_MAPPED_LOCATION
      e->goto_locus = EXPR_LOCATION (goto_t);
#else
      e->goto_locus = EXPR_LOCUS (goto_t);
#endif
      /* The goto is now fully represented by the fallthru edge, so
	 delete the statement itself.  */
      bsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}
891
892
893 /*---------------------------------------------------------------------------
894 Flowgraph analysis
895 ---------------------------------------------------------------------------*/
896
897 /* Cleanup useless labels in basic blocks. This is something we wish
898 to do early because it allows us to group case labels before creating
899 the edges for the CFG, and it speeds up block statement iterators in
900 all passes later on.
901 We only run this pass once, running it more than once is probably not
902 profitable. */
903
904 /* A map from basic block index to the leading label of that block. */
905 static tree *label_for_bb;
906
907 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
908 static void
909 update_eh_label (struct eh_region *region)
910 {
911 tree old_label = get_eh_region_tree_label (region);
912 if (old_label)
913 {
914 tree new_label;
915 basic_block bb = label_to_block (old_label);
916
917 /* ??? After optimizing, there may be EH regions with labels
918 that have already been removed from the function body, so
919 there is no basic block for them. */
920 if (! bb)
921 return;
922
923 new_label = label_for_bb[bb->index];
924 set_eh_region_tree_label (region, new_label);
925 }
926 }
927
928 /* Given LABEL return the first label in the same basic block. */
929 static tree
930 main_block_label (tree label)
931 {
932 basic_block bb = label_to_block (label);
933
934 /* label_to_block possibly inserted undefined label into the chain. */
935 if (!label_for_bb[bb->index])
936 label_for_bb[bb->index] = label;
937 return label_for_bb[bb->index];
938 }
939
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (tree, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;

      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
	{
	  tree label, stmt = bsi_stmt (i);

	  /* Labels can only appear at the start of a block; the first
	     non-label statement ends the search in this block.  */
	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (! label_for_bb[bb->index])
	    {
	      label_for_bb[bb->index] = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (! DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index]))
	    {
	      label_for_bb[bb->index] = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (!stmt)
	continue;

      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    tree true_branch, false_branch;

	    true_branch = COND_EXPR_THEN (stmt);
	    false_branch = COND_EXPR_ELSE (stmt);

	    GOTO_DESTINATION (true_branch)
	      = main_block_label (GOTO_DESTINATION (true_branch));
	    GOTO_DESTINATION (false_branch)
	      = main_block_label (GOTO_DESTINATION (false_branch));

	    break;
	  }

	case SWITCH_EXPR:
	  {
	    size_t i;
	    tree vec = SWITCH_LABELS (stmt);
	    size_t n = TREE_VEC_LENGTH (vec);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree elt = TREE_VEC_ELT (vec, i);
		tree label = main_block_label (CASE_LABEL (elt));
		CASE_LABEL (elt) = label;
	      }
	    break;
	  }

	/* We have to handle GOTO_EXPRs until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GOTO_EXPR:
	  if (! computed_goto_p (stmt))
	    {
	      GOTO_DESTINATION (stmt)
		= main_block_label (GOTO_DESTINATION (stmt));
	      break;
	    }
	  /* Computed gotos fall through to the default case: their
	     destination is not a LABEL_DECL we can redirect.  */

	default:
	  break;
	}
    }

  /* EH region labels must be redirected to the leading labels too.  */
  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index];

      if (! label_for_this_bb)
	continue;

      for (i = bsi_start (bb); !bsi_end_p (i); )
	{
	  tree label, stmt = bsi_stmt (i);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  label = LABEL_EXPR_LABEL (stmt);

	  if (label == label_for_this_bb
	      || ! DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    bsi_next (&i);
	  else
	    bsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1076
/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      tree stmt = last_stmt (bb);
      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
	{
	  tree labels = SWITCH_LABELS (stmt);
	  int old_size = TREE_VEC_LENGTH (labels);
	  int i, j, new_size = old_size;
	  tree default_case = TREE_VEC_ELT (labels, old_size - 1);
	  tree default_label;

	  /* The default label is always the last case in a switch
	     statement after gimplification.  */
	  default_label = CASE_LABEL (default_case);

	  /* Look for possible opportunities to merge cases.
	     Ignore the last element of the label vector because it
	     must be the default case.  */
	  i = 0;
	  while (i < old_size - 1)
	    {
	      tree base_case, base_label, base_high;
	      base_case = TREE_VEC_ELT (labels, i);

	      gcc_assert (base_case);
	      base_label = CASE_LABEL (base_case);

	      /* Discard cases that have the same destination as the
		 default case.  Slots are marked NULL_TREE here and
		 squeezed out by the compression loop below.  */
	      if (base_label == default_label)
		{
		  TREE_VEC_ELT (labels, i) = NULL_TREE;
		  i++;
		  new_size--;
		  continue;
		}

	      /* The upper bound of the current run of mergeable cases:
		 CASE_HIGH if the base case is a range, else its single
		 CASE_LOW value.  */
	      base_high = CASE_HIGH (base_case) ?
		CASE_HIGH (base_case) : CASE_LOW (base_case);
	      i++;
	      /* Try to merge case labels.  Break out when we reach the end
		 of the label vector or when we cannot merge the next case
		 label with the current one.  */
	      while (i < old_size - 1)
		{
		  tree merge_case = TREE_VEC_ELT (labels, i);
		  tree merge_label = CASE_LABEL (merge_case);
		  /* T is BASE_HIGH + 1, i.e. the value the next case must
		     start at for the ranges to be consecutive.  */
		  tree t = int_const_binop (PLUS_EXPR, base_high,
					    integer_one_node, 1);

		  /* Merge the cases if they jump to the same place,
		     and their ranges are consecutive.  */
		  if (merge_label == base_label
		      && tree_int_cst_equal (CASE_LOW (merge_case), t))
		    {
		      base_high = CASE_HIGH (merge_case) ?
			CASE_HIGH (merge_case) : CASE_LOW (merge_case);
		      CASE_HIGH (base_case) = base_high;
		      TREE_VEC_ELT (labels, i) = NULL_TREE;
		      new_size--;
		      i++;
		    }
		  else
		    break;
		}
	    }

	  /* Compress the case labels in the label vector, and adjust the
	     length of the vector.  J scans past the NULL_TREE holes left
	     by the merging above; I is the next slot to fill.  */
	  for (i = 0, j = 0; i < new_size; i++)
	    {
	      while (! TREE_VEC_ELT (labels, j))
		j++;
	      TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
	    }
	  TREE_VEC_LENGTH (labels) = new_size;
	}
    }
}
1166
/* Checks whether we can merge block B into block A.  */

static bool
tree_can_merge_blocks_p (basic_block a, basic_block b)
{
  tree stmt;
  block_stmt_iterator bsi;
  tree phi;

  /* A must flow into B through its only, normal (non-abnormal)
     outgoing edge, and B must have no other predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis.  */
  phi = phi_nodes (b);
  if (phi)
    {
      if (need_ssa_update_p ())
	return false;

      /* Each virtual PHI result must be replaceable by its single
	 argument for the merge to be possible.  */
      for (; phi; phi = PHI_CHAIN (phi))
	if (!is_gimple_reg (PHI_RESULT (phi))
	    && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
	  return false;
    }

  /* Do not remove user labels.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	break;
      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}
1233
/* Replaces all uses of NAME by VAL.  After the call NAME has no
   remaining immediate uses (asserted below); statements whose operands
   changed are re-folded and scheduled for renaming.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  tree stmt;
  edge e;
  unsigned i;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Replace every occurrence of NAME within this statement.  */
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (TREE_CODE (stmt) == PHI_NODE)
	    {
	      e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_assert (!is_gimple_reg (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}
      if (TREE_CODE (stmt) != PHI_NODE)
	{
	  tree rhs;

	  /* The operand change may enable folding; also refresh the
	     invariant flags of an ADDR_EXPR rhs, the EH information,
	     and the renaming worklist for this statement.  */
	  fold_stmt_inplace (stmt);
	  rhs = get_rhs (stmt);
	  if (TREE_CODE (rhs) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (rhs);

	  maybe_clean_or_replace_eh_stmt (stmt, stmt);
	  mark_new_vars_to_rename (stmt);
	}
    }

  gcc_assert (num_imm_uses (name) == 0);

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      for (i = 0; i < current_loops->num; i++)
	{
	  loop = current_loops->parray[i];
	  if (loop)
	    substitute_in_loop_info (loop, name, val);
	}
    }
}
1294
/* Merge block B into block A.  The caller is expected to have checked
   legality with tree_can_merge_blocks_p.  */

static void
tree_merge_blocks (basic_block a, basic_block b)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator last;
  tree phi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.
     BSI stays at the end of A so that replacement copies can be
     appended there.  */
  bsi = bsi_last (a);
  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
    {
      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
      tree copy;
      bool may_replace_uses = may_propagate_copy (def, use);

      /* In case we have loops to care about, do not propagate arguments of
	 loop closed ssa phi nodes.  */
      if (current_loops
	  && is_gimple_reg (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (is_gimple_reg (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = build2 (MODIFY_EXPR, void_type_node, def, use);
	  bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
	  SET_PHI_RESULT (phi, NULL_TREE);
	  SSA_NAME_DEF_STMT (def) = copy;
	}
      else
	replace_uses_by (def, use);

      remove_phi_node (phi, NULL);
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
    {
      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
	{
	  tree label = bsi_stmt (bsi);

	  bsi_remove (&bsi, false);
	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
	    {
	      block_stmt_iterator dest_bsi = bsi_start (a);
	      bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
	    }
	}
      else
	{
	  change_bb_for_stmt (bsi_stmt (bsi), a);
	  bsi_next (&bsi);
	}
    }

  /* Merge the chains.  */
  last = tsi_last (a->stmt_list);
  tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
  b->stmt_list = NULL;
}
1381
1382
1383 /* Return the one of two successors of BB that is not reachable by a
1384 reached by a complex edge, if there is one. Else, return BB. We use
1385 this in optimizations that use post-dominators for their heuristics,
1386 to catch the cases in C++ where function calls are involved. */
1387
1388 basic_block
1389 single_noncomplex_succ (basic_block bb)
1390 {
1391 edge e0, e1;
1392 if (EDGE_COUNT (bb->succs) != 2)
1393 return bb;
1394
1395 e0 = EDGE_SUCC (bb, 0);
1396 e1 = EDGE_SUCC (bb, 1);
1397 if (e0->flags & EDGE_COMPLEX)
1398 return e1->dest;
1399 if (e1->flags & EDGE_COMPLEX)
1400 return e0->dest;
1401
1402 return bb;
1403 }
1404
1405
1406 /* Walk the function tree removing unnecessary statements.
1407
1408 * Empty statement nodes are removed
1409
1410 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1411
1412 * Unnecessary COND_EXPRs are removed
1413
1414 * Some unnecessary BIND_EXPRs are removed
1415
1416 Clearly more work could be done. The trick is doing the analysis
1417 and removal fast enough to be a net improvement in compile times.
1418
1419 Note that when we remove a control structure such as a COND_EXPR
1420 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1421 to ensure we eliminate all the useless code. */
1422
/* State threaded through the useless-statement-removal walk below.  */
struct rus_data
{
  /* Address of the most recently seen GOTO_EXPR, kept so it can be
     deleted if it turns out to jump to the immediately following
     label.  NULL when no such deletable goto is pending.  */
  tree *last_goto;
  /* Set when a transformation may have exposed further simplification
     opportunities; causes the whole pass to run again.  */
  bool repeat;
  /* The region just processed may throw an exception.  */
  bool may_throw;
  /* The region just processed may branch (goto or return).  */
  bool may_branch;
  /* The region just processed contains a label.  */
  bool has_label;
};

static void remove_useless_stmts_1 (tree *, struct rus_data *);
1433
/* Emit a "will never be executed" warning for STMT if it (or some
   statement nested within it) carries a source location with a usable
   line number.  Return true as soon as one warning has been emitted,
   false if no location could be found to warn at.  */

static bool
remove_useless_stmts_warn_notreached (tree stmt)
{
  if (EXPR_HAS_LOCATION (stmt))
    {
      location_t loc = EXPR_LOCATION (stmt);
      if (LOCATION_LINE (loc) > 0)
	{
	  warning (0, "%Hwill never be executed", &loc);
	  return true;
	}
    }

  /* STMT itself had no usable location; recurse into live containers
     looking for a sub-statement that does.  */
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	  if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
	    return true;
      }
      break;

    case COND_EXPR:
      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
	return true;
      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
	return true;
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
	return true;
      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
	return true;
      break;

    case CATCH_EXPR:
      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
    case EH_FILTER_EXPR:
      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
    case BIND_EXPR:
      /* NOTE(review): this recurses on the BLOCK node rather than
	 BIND_EXPR_BODY; verify this is intentional.  */
      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));

    default:
      /* Not a live container.  */
      break;
    }

  return false;
}
1489
/* Simplify the COND_EXPR at *STMT_P after recursively simplifying both
   of its arms.  Sets DATA->repeat when a change may have exposed more
   simplification opportunities.  */

static void
remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
{
  tree then_clause, else_clause, cond;
  bool save_has_label, then_has_label, else_has_label;

  /* Process each arm with has_label reset so we know whether each arm
     contains a label of its own; an arm with a label cannot be zapped
     even if the condition proves it untaken.  */
  save_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);

  then_has_label = data->has_label;
  data->has_label = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);

  else_has_label = data->has_label;
  data->has_label = save_has_label | then_has_label | else_has_label;

  then_clause = COND_EXPR_THEN (*stmt_p);
  else_clause = COND_EXPR_ELSE (*stmt_p);
  cond = fold (COND_EXPR_COND (*stmt_p));

  /* If neither arm does anything at all, we can remove the whole IF.  */
  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
    {
      *stmt_p = build_empty_stmt ();
      data->repeat = true;
    }

  /* If there are no reachable statements in an arm, then we can
     zap the entire conditional.  */
  else if (integer_nonzerop (cond) && !else_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (else_clause);
      *stmt_p = then_clause;
      data->repeat = true;
    }
  else if (integer_zerop (cond) && !then_has_label)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (then_clause);
      *stmt_p = else_clause;
      data->repeat = true;
    }

  /* Check a couple of simple things on then/else with single stmts.  */
  else
    {
      tree then_stmt = expr_only (then_clause);
      tree else_stmt = expr_only (else_clause);

      /* Notice branches to a common destination.  */
      if (then_stmt && else_stmt
	  && TREE_CODE (then_stmt) == GOTO_EXPR
	  && TREE_CODE (else_stmt) == GOTO_EXPR
	  && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
	{
	  /* Both arms jump to the same label, so the condition is
	     irrelevant; keep just one goto.  */
	  *stmt_p = then_stmt;
	  data->repeat = true;
	}

      /* If the THEN/ELSE clause merely assigns a value to a variable or
	 parameter which is already known to contain that value, then
	 remove the useless THEN/ELSE clause.  */
      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
	{
	  /* "if (x) ... else x = 0;" -- the else arm is a no-op.  */
	  if (else_stmt
	      && TREE_CODE (else_stmt) == MODIFY_EXPR
	      && TREE_OPERAND (else_stmt, 0) == cond
	      && integer_zerop (TREE_OPERAND (else_stmt, 1)))
	    COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
	}
      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
	       && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
	       && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
	{
	  /* "if (x == C) x = C;" or "if (x != C) ... else x = C;" --
	     the assignment in the arm where the equality holds is
	     redundant.  */
	  tree stmt = (TREE_CODE (cond) == EQ_EXPR
		       ? then_stmt : else_stmt);
	  tree *location = (TREE_CODE (cond) == EQ_EXPR
			    ? &COND_EXPR_THEN (*stmt_p)
			    : &COND_EXPR_ELSE (*stmt_p));

	  if (stmt
	      && TREE_CODE (stmt) == MODIFY_EXPR
	      && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
	      && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
	    *location = alloc_stmt_list ();
	}
    }

  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
     would be re-introduced during lowering.  */
  data->last_goto = NULL;
}
1589
1590
/* Simplify the TRY_FINALLY_EXPR at *STMT_P: drop the TRY or the FINALLY
   part when it is empty, or flatten the whole construct when the body
   can neither throw nor branch.  Sets DATA->repeat on change.  */

static void
remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 1);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
    {
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      tree stmt = *stmt_p;
      *stmt_p = TREE_OPERAND (stmt, 0);
      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
      data->repeat = true;
    }
}
1640
1641
/* Simplify the TRY_CATCH_EXPR at *STMT_P.  If the body cannot throw,
   the whole construct is replaced by the body; otherwise the handler is
   examined to decide whether exceptions can propagate past this point.
   Sets DATA->repeat on change and updates DATA->may_throw.  */

static void
remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;
  tree_stmt_iterator i;
  tree stmt;

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_goto = NULL;

  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
	remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
      *stmt_p = TREE_OPERAND (*stmt_p, 0);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
  stmt = tsi_stmt (i);
  data->last_goto = NULL;

  switch (TREE_CODE (stmt))
    {
    case CATCH_EXPR:
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  stmt = tsi_stmt (i);
	  /* If we catch all exceptions, then the body does not
	     propagate exceptions past this point.  */
	  if (CATCH_TYPES (stmt) == NULL)
	    this_may_throw = false;
	  data->last_goto = NULL;
	  remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
	}
      break;

    case EH_FILTER_EXPR:
      /* An empty filter list or a must-not-throw filter both stop
	 exceptions from propagating further.  */
      if (EH_FILTER_MUST_NOT_THROW (stmt))
	this_may_throw = false;
      else if (EH_FILTER_TYPES (stmt) == NULL)
	this_may_throw = false;
      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
      break;

    default:
      /* Otherwise this is a cleanup.  */
      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);

      /* If the cleanup is empty, then we can emit the TRY block without
	 the enclosing TRY_CATCH_EXPR.  */
      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
	{
	  *stmt_p = TREE_OPERAND (*stmt_p, 0);
	  data->repeat = true;
	}
      break;
    }
  data->may_throw |= this_may_throw;
}
1715
1716
/* Simplify the BIND_EXPR at *STMT_P, replacing it by its body when the
   BIND_EXPR declares no variables and is neither the function's
   top-level BIND_EXPR nor the top-level BIND_EXPR of an inlined
   function.  Sets DATA->repeat on change.  */

static void
remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
{
  tree block;

  /* First remove anything underneath the BIND_EXPR.  */
  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);

  /* If the BIND_EXPR has no variables, then we can pull everything
     up one level and remove the BIND_EXPR, unless this is the toplevel
     BIND_EXPR for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = BIND_EXPR_BLOCK (*stmt_p);
  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
      && (! block
	  || ! BLOCK_ABSTRACT_ORIGIN (block)
	  || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
	      != FUNCTION_DECL)))
    {
      *stmt_p = BIND_EXPR_BODY (*stmt_p);
      data->repeat = true;
    }
}
1743
1744
1745 static void
1746 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1747 {
1748 tree dest = GOTO_DESTINATION (*stmt_p);
1749
1750 data->may_branch = true;
1751 data->last_goto = NULL;
1752
1753 /* Record the last goto expr, so that we can delete it if unnecessary. */
1754 if (TREE_CODE (dest) == LABEL_DECL)
1755 data->last_goto = stmt_p;
1756 }
1757
1758
/* Process the LABEL_EXPR at *STMT_P: note that the region has a label,
   and delete the preceding goto when it jumps straight to this label.  */

static void
remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
{
  tree label = LABEL_EXPR_LABEL (*stmt_p);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_goto = NULL;

  /* A goto immediately followed by its target label is a no-op;
     replace the goto with an empty statement and rescan.  */
  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
    {
      *data->last_goto = build_empty_stmt ();
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */
}
1778
1779
1780 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1781 decl. This allows us to eliminate redundant or useless
1782 calls to "const" functions.
1783
1784 Gimplifier already does the same operation, but we may notice functions
1785 being const and pure once their calls has been gimplified, so we need
1786 to update the flag. */
1787
1788 static void
1789 update_call_expr_flags (tree call)
1790 {
1791 tree decl = get_callee_fndecl (call);
1792 if (!decl)
1793 return;
1794 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1795 TREE_SIDE_EFFECTS (call) = 0;
1796 if (TREE_NOTHROW (decl))
1797 TREE_NOTHROW (call) = 1;
1798 }
1799
1800
1801 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1802
1803 void
1804 notice_special_calls (tree t)
1805 {
1806 int flags = call_expr_flags (t);
1807
1808 if (flags & ECF_MAY_BE_ALLOCA)
1809 current_function_calls_alloca = true;
1810 if (flags & ECF_RETURNS_TWICE)
1811 current_function_calls_setjmp = true;
1812 }
1813
1814
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags: the flags are re-established by
   notice_special_calls for every call that survives.  */

void
clear_special_calls (void)
{
  current_function_calls_alloca = false;
  current_function_calls_setjmp = false;
}
1824
1825
/* Worker for remove_useless_stmts: dispatch on the code of *TP,
   simplifying the statement in place and accumulating region
   information (may_throw, may_branch, has_label, last_goto) in DATA.  */

static void
remove_useless_stmts_1 (tree *tp, struct rus_data *data)
{
  tree t = *tp, op;

  switch (TREE_CODE (t))
    {
    case COND_EXPR:
      remove_useless_stmts_cond (tp, data);
      break;

    case TRY_FINALLY_EXPR:
      remove_useless_stmts_tf (tp, data);
      break;

    case TRY_CATCH_EXPR:
      remove_useless_stmts_tc (tp, data);
      break;

    case BIND_EXPR:
      remove_useless_stmts_bind (tp, data);
      break;

    case GOTO_EXPR:
      remove_useless_stmts_goto (tp, data);
      break;

    case LABEL_EXPR:
      remove_useless_stmts_label (tp, data);
      break;

    case RETURN_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      data->may_branch = true;
      break;

    case CALL_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      notice_special_calls (t);
      update_call_expr_flags (t);
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case MODIFY_EXPR:
      data->last_goto = NULL;
      fold_stmt (tp);
      /* The right-hand side may be a call; record its properties too.  */
      op = get_call_expr_in (t);
      if (op)
	{
	  update_call_expr_flags (op);
	  notice_special_calls (op);
	}
      if (tree_could_throw_p (t))
	data->may_throw = true;
      break;

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i = tsi_start (t);
	while (!tsi_end_p (i))
	  {
	    t = tsi_stmt (i);
	    /* Drop empty statements outright.  */
	    if (IS_EMPTY_STMT (t))
	      {
		tsi_delink (&i);
		continue;
	      }

	    remove_useless_stmts_1 (tsi_stmt_ptr (i), data);

	    /* Simplification may have replaced the statement with a
	       whole statement list; splice it in flat.  */
	    t = tsi_stmt (i);
	    if (TREE_CODE (t) == STATEMENT_LIST)
	      {
		tsi_link_before (&i, t, TSI_SAME_STMT);
		tsi_delink (&i);
	      }
	    else
	      tsi_next (&i);
	  }
      }
      break;
    case ASM_EXPR:
      fold_stmt (tp);
      data->last_goto = NULL;
      break;

    default:
      data->last_goto = NULL;
      break;
    }
}
1920
1921 static unsigned int
1922 remove_useless_stmts (void)
1923 {
1924 struct rus_data data;
1925
1926 clear_special_calls ();
1927
1928 do
1929 {
1930 memset (&data, 0, sizeof (data));
1931 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1932 }
1933 while (data.repeat);
1934 return 0;
1935 }
1936
1937
/* Pass descriptor for the useless-statement-removal pass above.  */

struct tree_opt_pass pass_remove_useless_stmts =
{
  "useless",				/* name */
  NULL,					/* gate */
  remove_useless_stmts,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_gimple_any,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */
  0					/* letter */
};
1954
1955 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1956
1957 static void
1958 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1959 {
1960 tree phi;
1961
1962 /* Since this block is no longer reachable, we can just delete all
1963 of its PHI nodes. */
1964 phi = phi_nodes (bb);
1965 while (phi)
1966 {
1967 tree next = PHI_CHAIN (phi);
1968 remove_phi_node (phi, NULL_TREE);
1969 phi = next;
1970 }
1971
1972 /* Remove edges to BB's successors. */
1973 while (EDGE_COUNT (bb->succs) > 0)
1974 remove_edge (EDGE_SUCC (bb, 0));
1975 }
1976
1977
/* Remove statements of basic block BB, then its PHI nodes and outgoing
   edges.  May emit a -Wunreachable-code warning for the block.  */

static void
remove_bb (basic_block bb)
{
  block_stmt_iterator i;
#ifdef USE_MAPPED_LOCATION
  source_location loc = UNKNOWN_LOCATION;
#else
  source_locus loc = 0;
#endif

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (bb, dump_file, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* If we remove the header or the latch of a loop, mark the loop for
     removal by setting its header and latch to NULL.  */
  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      if (loop->latch == bb
	  || loop->header == bb)
	{
	  loop->latch = NULL;
	  loop->header = NULL;

	  /* Also clean up the information associated with the loop.  Updating
	     it would waste time.  More importantly, it may refer to ssa
	     names that were defined in other removed basic block -- these
	     ssa names are now removed and invalid.  */
	  free_numbers_of_iterations_estimates_loop (loop);
	}
    }

  /* Remove all the instructions in the block.  */
  for (i = bsi_start (bb); !bsi_end_p (i);)
    {
      tree stmt = bsi_stmt (i);
      /* Forced and non-local labels may still be referenced even though
	 BB is going away, so they are moved to the previous block
	 instead of being deleted.  */
      if (TREE_CODE (stmt) == LABEL_EXPR
          && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
	      || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
	{
	  basic_block new_bb;
	  block_stmt_iterator new_bsi;

	  /* A non-reachable non-local label may still be referenced.
	     But it no longer needs to carry the extra semantics of
	     non-locality.  */
	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
	    {
	      DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
	      FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
	    }

	  new_bb = bb->prev_bb;
	  new_bsi = bsi_start (new_bb);
	  bsi_remove (&i, false);
	  bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
	}
      else
        {
	  /* Release SSA definitions if we are in SSA.  Note that we
	     may be called when not in SSA.  For example,
	     final_cleanup calls this function via
	     cleanup_tree_cfg.  */
	  if (in_ssa_p)
	    release_defs (stmt);

	  bsi_remove (&i, true);
	}

      /* Don't warn for removed gotos.  Gotos are often removed due to
	 jump threading, thus resulting in bogus warnings.  Not great,
	 since this way we lose warnings for gotos in the original
	 program that are indeed unreachable.  */
      if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
	{
#ifdef USE_MAPPED_LOCATION
	  if (EXPR_HAS_LOCATION (stmt))
	    loc = EXPR_LOCATION (stmt);
#else
	  source_locus t;
	  t = EXPR_LOCUS (stmt);
	  if (t && LOCATION_LINE (*t) > 0)
	    loc = t;
#endif
	}
    }

  /* If requested, give a warning that the first statement in the
     block is unreachable.  The loop above walks forward and only
     records a location while LOC is still unset, so LOC refers to the
     first statement in the block that has a usable location.  */
#ifdef USE_MAPPED_LOCATION
  if (loc > BUILTINS_LOCATION)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
#else
  if (loc)
    warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
#endif

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
}
2089
2090
2091 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2092 predicate VAL, return the edge that will be taken out of the block.
2093 If VAL does not match a unique edge, NULL is returned. */
2094
2095 edge
2096 find_taken_edge (basic_block bb, tree val)
2097 {
2098 tree stmt;
2099
2100 stmt = last_stmt (bb);
2101
2102 gcc_assert (stmt);
2103 gcc_assert (is_ctrl_stmt (stmt));
2104 gcc_assert (val);
2105
2106 if (! is_gimple_min_invariant (val))
2107 return NULL;
2108
2109 if (TREE_CODE (stmt) == COND_EXPR)
2110 return find_taken_edge_cond_expr (bb, val);
2111
2112 if (TREE_CODE (stmt) == SWITCH_EXPR)
2113 return find_taken_edge_switch_expr (bb, val);
2114
2115 if (computed_goto_p (stmt))
2116 return find_taken_edge_computed_goto (bb, TREE_OPERAND( val, 0));
2117
2118 gcc_unreachable ();
2119 }
2120
2121 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2122 statement, determine which of the outgoing edges will be taken out of the
2123 block. Return NULL if either edge may be taken. */
2124
2125 static edge
2126 find_taken_edge_computed_goto (basic_block bb, tree val)
2127 {
2128 basic_block dest;
2129 edge e = NULL;
2130
2131 dest = label_to_block (val);
2132 if (dest)
2133 {
2134 e = find_edge (bb, dest);
2135 gcc_assert (e != NULL);
2136 }
2137
2138 return e;
2139 }
2140
2141 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2142 statement, determine which of the two edges will be taken out of the
2143 block. Return NULL if either edge may be taken. */
2144
2145 static edge
2146 find_taken_edge_cond_expr (basic_block bb, tree val)
2147 {
2148 edge true_edge, false_edge;
2149
2150 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2151
2152 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2153 return (zero_p (val) ? false_edge : true_edge);
2154 }
2155
2156 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2157 statement, determine which edge will be taken out of the block. Return
2158 NULL if any edge may be taken. */
2159
2160 static edge
2161 find_taken_edge_switch_expr (basic_block bb, tree val)
2162 {
2163 tree switch_expr, taken_case;
2164 basic_block dest_bb;
2165 edge e;
2166
2167 switch_expr = last_stmt (bb);
2168 taken_case = find_case_label_for_value (switch_expr, val);
2169 dest_bb = label_to_block (CASE_LABEL (taken_case));
2170
2171 e = find_edge (bb, dest_bb);
2172 gcc_assert (e);
2173 return e;
2174 }
2175
2176
/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (tree switch_expr, tree val)
{
  tree vec = SWITCH_LABELS (switch_expr);
  size_t low, high, n = TREE_VEC_LENGTH (vec);
  tree default_case = TREE_VEC_ELT (vec, n - 1);

  /* Binary search over the half-open interval (LOW, HIGH].  LOW starts
     "one below index 0": since LOW is unsigned, -1 wraps to SIZE_MAX,
     and HIGH - LOW still computes the interval width correctly modulo
     the size of size_t.  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = TREE_VEC_ELT (vec, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2218
2219
2220
2221
2222 /*---------------------------------------------------------------------------
2223 Debugging functions
2224 ---------------------------------------------------------------------------*/
2225
/* Dump tree-specific information of block BB to file OUTF.  INDENT is
   forwarded to the generic basic-block dumper; virtual operands are
   included (TDF_VOPS).  */

void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
  dump_generic_bb (outf, bb, indent, TDF_VOPS);
}
2233
2234
/* Dump a basic block on stderr.  Convenience entry point for use from
   a debugger.  */

void
debug_tree_bb (basic_block bb)
{
  dump_bb (bb, stderr, 0);
}
2242
2243
2244 /* Dump basic block with index N on stderr. */
2245
2246 basic_block
2247 debug_tree_bb_n (int n)
2248 {
2249 debug_tree_bb (BASIC_BLOCK (n));
2250 return BASIC_BLOCK (n);
2251 }
2252
2253
/* Dump the CFG on stderr.  Convenience wrapper around dump_tree_cfg
   for use from a debugger.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in tree-pass.h).  */

void
debug_tree_cfg (int flags)
{
  dump_tree_cfg (stderr, flags);
}
2264
2265
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  */

void
dump_tree_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      const char *funcname
	= lang_hooks.decl_printable_name (current_function_decl, 2);

      /* Print a summary header with block and edge counts, followed by
	 a brief dump of the CFG structure itself.  */
      fputc ('\n', file);
      fprintf (file, ";; Function %s\n\n", funcname);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always dump the function body, annotated with block boundaries.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2293
2294
/* Dump CFG statistics on FILE.  */

void
dump_cfg_stats (FILE *file)
{
  /* High-water mark of coalesced labels, kept across calls.  */
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);


  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", " Number of ", "Memory");
  fprintf (file, fmt_str, "", " instances ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  /* Memory consumed by the basic block array.  */
  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Memory consumed by edges; count successor edges of every block.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2345
2346
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable (and thus callable from a debugger).  */

void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2355
2356
/* Dump the flowgraph to a .vcg FILE.  The output is in the VCG graph
   description format: one "node:" record per basic block and one
   "edge:" record per CFG edge, with fake edges drawn dotted.  */

static void
tree_cfg2vcg (FILE *file)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* Write the file header.  */
  fprintf (file, "graph: { title: \"%s\"\n", funcname);
  fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
  fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");

  /* Write blocks and edges.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    {
      fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
	       e->dest->index);

      if (e->flags & EDGE_FAKE)
	fprintf (file, " linestyle: dotted priority: 10");
      else
	fprintf (file, " linestyle: solid priority: 100");

      fprintf (file, " }\n");
    }
  fputc ('\n', file);

  FOR_EACH_BB (bb)
    {
      enum tree_code head_code, end_code;
      const char *head_name, *end_name;
      int head_line = 0;
      int end_line = 0;
      /* Label each node with the tree codes and line numbers of its
	 first and last statements, if any.  */
      tree first = first_stmt (bb);
      tree last = last_stmt (bb);

      if (first)
	{
	  head_code = TREE_CODE (first);
	  head_name = tree_code_name[head_code];
	  head_line = get_lineno (first);
	}
      else
	head_name = "no-statement";

      if (last)
	{
	  end_code = TREE_CODE (last);
	  end_name = tree_code_name[end_code];
	  end_line = get_lineno (last);
	}
      else
	end_name = "no-statement";

      fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
	       bb->index, bb->index, head_name, head_line, end_name,
	       end_line);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (e->dest == EXIT_BLOCK_PTR)
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
	  else
	    fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);

	  if (e->flags & EDGE_FAKE)
	    fprintf (file, " priority: 10 linestyle: dotted");
	  else
	    fprintf (file, " priority: 100 linestyle: solid");

	  fprintf (file, " }\n");
	}

      if (bb->next_bb != EXIT_BLOCK_PTR)
	fputc ('\n', file);
    }

  fputs ("}\n\n", file);
}
2440
2441
2442
2443 /*---------------------------------------------------------------------------
2444 Miscellaneous helpers
2445 ---------------------------------------------------------------------------*/
2446
2447 /* Return true if T represents a stmt that always transfers control. */
2448
2449 bool
2450 is_ctrl_stmt (tree t)
2451 {
2452 return (TREE_CODE (t) == COND_EXPR
2453 || TREE_CODE (t) == SWITCH_EXPR
2454 || TREE_CODE (t) == GOTO_EXPR
2455 || TREE_CODE (t) == RETURN_EXPR
2456 || TREE_CODE (t) == RESX_EXPR);
2457 }
2458
2459
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (tree t)
{
  tree call;

  gcc_assert (t);
  /* T may be a plain CALL_EXPR or a statement wrapping one; extract
     the call, if any.  */
  call = get_call_expr_in (t);
  if (call)
    {
      /* A non-pure/const CALL_EXPR alters flow control if the current
	 function has nonlocal labels.  */
      if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
	return true;

      /* A CALL_EXPR also alters control flow if it does not return.  */
      if (call_expr_flags (call) & ECF_NORETURN)
	return true;
    }

  /* OpenMP directives alter control flow.  */
  if (OMP_DIRECTIVE_P (t))
    return true;

  /* If a statement can throw, it alters control flow.  */
  return tree_can_throw_internal (t);
}
2489
2490
2491 /* Return true if T is a computed goto. */
2492
2493 bool
2494 computed_goto_p (tree t)
2495 {
2496 return (TREE_CODE (t) == GOTO_EXPR
2497 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2498 }
2499
2500
/* Return true if T is a simple local goto, i.e. a GOTO_EXPR whose
   destination is a LABEL_DECL rather than a computed address.  */

bool
simple_goto_p (tree t)
{
  return (TREE_CODE (t) == GOTO_EXPR
	  && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
}
2509
2510
/* Return true if T can make an abnormal transfer of control flow.
   Transfers of control flow associated with EH are excluded.  */

bool
tree_can_make_abnormal_goto (tree t)
{
  if (computed_goto_p (t))
    return true;
  /* Strip a MODIFY_EXPR / WITH_SIZE_EXPR wrapper to expose an
     underlying CALL_EXPR, if any.  */
  if (TREE_CODE (t) == MODIFY_EXPR)
    t = TREE_OPERAND (t, 1);
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);
  /* A call with side effects may jump to a nonlocal label.  */
  if (TREE_CODE (t) == CALL_EXPR)
    return TREE_SIDE_EFFECTS (t) && current_function_has_nonlocal_label;
  return false;
}
2527
2528
/* Return true if T should start a new basic block.  PREV_T is the
   statement preceding T.  It is used when T is a label or a case label.
   Labels should only start a new basic block if their previous statement
   wasn't a label.  Otherwise, sequence of labels would generate
   unnecessary basic blocks that only contain a single label.

   Note: as a side effect this bumps cfg_stats.num_merged_labels when a
   label is coalesced into the previous label's block.  */

static inline bool
stmt_starts_bb_p (tree t, tree prev_t)
{
  if (t == NULL_TREE)
    return false;

  /* LABEL_EXPRs start a new basic block only if the preceding
     statement wasn't a label of the same type.  This prevents the
     creation of consecutive blocks that have nothing but a single
     label.  */
  if (TREE_CODE (t) == LABEL_EXPR)
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
	  || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
	return true;

      if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
	{
	  /* A label cannot share a block with a preceding nonlocal
	     label.  */
	  if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
	    return true;

	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }

  return false;
}
2566
2567
/* Return true if T should end a basic block, i.e. it either always
   transfers control or may alter the flow of control.  */

bool
stmt_ends_bb_p (tree t)
{
  return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
}
2575
2576
/* Add gotos that used to be represented implicitly in the CFG.  */

void
disband_implicit_edges (void)
{
  basic_block bb;
  block_stmt_iterator last;
  edge e;
  edge_iterator ei;
  tree stmt, label;

  FOR_EACH_BB (bb)
    {
      last = bsi_last (bb);
      stmt = last_stmt (bb);

      if (stmt && TREE_CODE (stmt) == COND_EXPR)
	{
	  /* Remove superfluous gotos from COND_EXPR branches.  Moved
	     from cfg_remove_useless_stmts here since it violates the
	     invariants for tree--cfg correspondence and thus fits better
	     here where we do it anyway.  */
	  e = find_edge (bb, bb->next_bb);
	  if (e)
	    {
	      /* Whichever arm targets the next block can fall through;
		 empty it and mark the edge accordingly.  */
	      if (e->flags & EDGE_TRUE_VALUE)
		COND_EXPR_THEN (stmt) = build_empty_stmt ();
	      else if (e->flags & EDGE_FALSE_VALUE)
		COND_EXPR_ELSE (stmt) = build_empty_stmt ();
	      else
		gcc_unreachable ();
	      e->flags |= EDGE_FALLTHRU;
	    }

	  continue;
	}

      if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* Remove the RETURN_EXPR if we may fall through to the exit
	     instead.  */
	  gcc_assert (single_succ_p (bb));
	  gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

	  /* Only a valueless return immediately before the exit block
	     can be elided.  */
	  if (bb->next_bb == EXIT_BLOCK_PTR
	      && !TREE_OPERAND (stmt, 0))
	    {
	      bsi_remove (&last, true);
	      single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	    }
	  continue;
	}

      /* There can be no fallthru edge if the last statement is a control
	 one.  */
      if (stmt && is_ctrl_stmt (stmt))
	continue;

      /* Find a fallthru edge and emit the goto if necessary.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      /* No explicit goto is needed when the target is the next block.  */
      if (!e || e->dest == bb->next_bb)
	continue;

      gcc_assert (e->dest != EXIT_BLOCK_PTR);
      label = tree_block_label (e->dest);

      stmt = build1 (GOTO_EXPR, void_type_node, label);
#ifdef USE_MAPPED_LOCATION
      SET_EXPR_LOCATION (stmt, e->goto_locus);
#else
      SET_EXPR_LOCUS (stmt, e->goto_locus);
#endif
      bsi_insert_after (&last, stmt, BSI_NEW_STMT);
      e->flags &= ~EDGE_FALLTHRU;
    }
}
2656
/* Remove block annotations and other datastructures.  Dropping the
   reference lets the GC reclaim the label-to-block map.  */

void
delete_tree_cfg_annotations (void)
{
  label_to_block_map = NULL;
}
2664
2665
2666 /* Return the first statement in basic block BB. */
2667
2668 tree
2669 first_stmt (basic_block bb)
2670 {
2671 block_stmt_iterator i = bsi_start (bb);
2672 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2673 }
2674
2675
2676 /* Return the last statement in basic block BB. */
2677
2678 tree
2679 last_stmt (basic_block bb)
2680 {
2681 block_stmt_iterator b = bsi_last (bb);
2682 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2683 }
2684
2685
2686 /* Return a pointer to the last statement in block BB. */
2687
2688 tree *
2689 last_stmt_ptr (basic_block bb)
2690 {
2691 block_stmt_iterator last = bsi_last (bb);
2692 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2693 }
2694
2695
/* Return the last statement of an otherwise empty block.  Return NULL
   if the block is totally empty, or if it contains more than one
   statement.  */

tree
last_and_only_stmt (basic_block bb)
{
  block_stmt_iterator i = bsi_last (bb);
  tree last, prev;

  /* Totally empty block.  */
  if (bsi_end_p (i))
    return NULL_TREE;

  last = bsi_stmt (i);
  bsi_prev (&i);
  /* Exactly one statement.  */
  if (bsi_end_p (i))
    return last;

  /* Empty statements should no longer appear in the instruction stream.
     Everything that might have appeared before should be deleted by
     remove_useless_stmts, and the optimizers should just bsi_remove
     instead of smashing with build_empty_stmt.

     Thus the only thing that should appear here in a block containing
     one executable statement is a label.  */
  prev = bsi_stmt (i);
  if (TREE_CODE (prev) == LABEL_EXPR)
    return last;
  else
    return NULL_TREE;
}
2727
2728
2729 /* Mark BB as the basic block holding statement T. */
2730
2731 void
2732 set_bb_for_stmt (tree t, basic_block bb)
2733 {
2734 if (TREE_CODE (t) == PHI_NODE)
2735 PHI_BB (t) = bb;
2736 else if (TREE_CODE (t) == STATEMENT_LIST)
2737 {
2738 tree_stmt_iterator i;
2739 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2740 set_bb_for_stmt (tsi_stmt (i), bb);
2741 }
2742 else
2743 {
2744 stmt_ann_t ann = get_stmt_ann (t);
2745 ann->bb = bb;
2746
2747 /* If the statement is a label, add the label to block-to-labels map
2748 so that we can speed up edge creation for GOTO_EXPRs. */
2749 if (TREE_CODE (t) == LABEL_EXPR)
2750 {
2751 int uid;
2752
2753 t = LABEL_EXPR_LABEL (t);
2754 uid = LABEL_DECL_UID (t);
2755 if (uid == -1)
2756 {
2757 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2758 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2759 if (old_len <= (unsigned) uid)
2760 {
2761 basic_block *addr;
2762 unsigned new_len = 3 * uid / 2;
2763
2764 VEC_safe_grow (basic_block, gc, label_to_block_map,
2765 new_len);
2766 addr = VEC_address (basic_block, label_to_block_map);
2767 memset (&addr[old_len],
2768 0, sizeof (basic_block) * (new_len - old_len));
2769 }
2770 }
2771 else
2772 /* We're moving an existing label. Make sure that we've
2773 removed it from the old block. */
2774 gcc_assert (!bb
2775 || !VEC_index (basic_block, label_to_block_map, uid));
2776 VEC_replace (basic_block, label_to_block_map, uid, bb);
2777 }
2778 }
2779 }
2780
/* Faster version of set_bb_for_stmt that assumes the statement is being
   moved from one basic block to another (so its label UID, if any, is
   already assigned and the map is already large enough).
   For BB splitting we can run into quadratic case, so performance is quite
   important and knowing that the tables are big enough, change_bb_for_stmt
   can inline as leaf function.  */
static inline void
change_bb_for_stmt (tree t, basic_block bb)
{
  get_stmt_ann (t)->bb = bb;
  if (TREE_CODE (t) == LABEL_EXPR)
    VEC_replace (basic_block, label_to_block_map,
		 LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
}
2794
/* Finds iterator for STMT.  Linear scan of STMT's block; aborts if
   STMT is not found there, so STMT must be a member of a block.  */

extern block_stmt_iterator
bsi_for_stmt (tree stmt)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
    if (bsi_stmt (bsi) == stmt)
      return bsi;

  gcc_unreachable ();
}
2808
2809 /* Mark statement T as modified, and update it. */
2810 static inline void
2811 update_modified_stmts (tree t)
2812 {
2813 if (TREE_CODE (t) == STATEMENT_LIST)
2814 {
2815 tree_stmt_iterator i;
2816 tree stmt;
2817 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2818 {
2819 stmt = tsi_stmt (i);
2820 update_stmt_if_modified (stmt);
2821 }
2822 }
2823 else
2824 update_stmt_if_modified (t);
2825 }
2826
/* Insert statement (or statement list) T before the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Annotate T with its block before linking it into the stream.  */
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_before (&i->tsi, t, m);
}
2838
2839
/* Insert statement (or statement list) T after the statement
   pointed-to by iterator I.  M specifies how to update iterator I
   after insertion (see enum bsi_iterator_update).  */

void
bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
{
  /* Annotate T with its block before linking it into the stream.  */
  set_bb_for_stmt (t, i->bb);
  update_modified_stmts (t);
  tsi_link_after (&i->tsi, t, m);
}
2851
2852
/* Remove the statement pointed to by iterator I.  The iterator is updated
   to the next statement.

   When REMOVE_EH_INFO is true we remove the statement pointed to by
   iterator I from the EH tables.  Otherwise we do not modify the EH
   tables.

   Generally, REMOVE_EH_INFO should be true when the statement is going to
   be removed from the IL and not reinserted elsewhere.  */

void
bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
{
  tree t = bsi_stmt (*i);
  /* Clear the block annotation and detach immediate uses before
     unlinking the statement from the stream.  */
  set_bb_for_stmt (t, NULL);
  delink_stmt_imm_use (t);
  tsi_delink (&i->tsi);
  mark_stmt_modified (t);
  if (remove_eh_info)
    remove_stmt_from_eh_region (t);
}
2874
2875
2876 /* Move the statement at FROM so it comes right after the statement at TO. */
2877
2878 void
2879 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2880 {
2881 tree stmt = bsi_stmt (*from);
2882 bsi_remove (from, false);
2883 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2884 }
2885
2886
/* Move the statement at FROM so it comes right before the statement at TO.  */

void
bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
{
  tree stmt = bsi_stmt (*from);
  /* Do not remove EH info; the statement is reinserted immediately.  */
  bsi_remove (from, false);
  bsi_insert_before (to, stmt, BSI_SAME_STMT);
}
2896
2897
2898 /* Move the statement at FROM to the end of basic block BB. */
2899
2900 void
2901 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2902 {
2903 block_stmt_iterator last = bsi_last (bb);
2904
2905 /* Have to check bsi_end_p because it could be an empty block. */
2906 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2907 bsi_move_before (from, &last);
2908 else
2909 bsi_move_after (from, &last);
2910 }
2911
2912
/* Replace the contents of the statement pointed to by iterator BSI
   with STMT.  If UPDATE_EH_INFO is true, the exception handling
   information of the original statement is moved to the new statement.  */

void
bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
{
  int eh_region;
  tree orig_stmt = bsi_stmt (*bsi);

  /* Carry the source location over to the replacement.  */
  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
  set_bb_for_stmt (stmt, bsi->bb);

  /* Preserve EH region information from the original statement, if
     requested by the caller.  */
  if (update_eh_info)
    {
      eh_region = lookup_stmt_eh_region (orig_stmt);
      if (eh_region >= 0)
	{
	  remove_stmt_from_eh_region (orig_stmt);
	  add_stmt_to_eh_region (stmt, eh_region);
	}
    }

  /* Detach the old statement's uses, splice in the new one in place,
     and note it as modified for operand updating.  */
  delink_stmt_imm_use (orig_stmt);
  *bsi_stmt_ptr (*bsi) = stmt;
  mark_stmt_modified (stmt);
  update_modified_stmts (stmt);
}
2943
2944
/* Insert the statement pointed-to by BSI into edge E.  Every attempt
   is made to place the statement in an existing basic block, but
   sometimes that isn't possible.  When it isn't possible, the edge is
   split and the statement is added to the new block.

   In all cases, the returned *BSI points to the correct location.  The
   return value is true if insertion should be done after the location,
   or false if it should be done before the location.  If new basic block
   has to be created, it is stored in *NEW_BB.  */

static bool
tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
			   basic_block *new_bb)
{
  basic_block dest, src;
  tree tmp;

  dest = e->dest;
 restart:

  /* If the destination has one predecessor which has no PHI nodes,
     insert there.  Except for the exit block.

     The requirement for no PHI nodes could be relaxed.  Basically we
     would have to examine the PHIs to prove that none of them used
     the value set by the statement we want to insert on E.  That
     hardly seems worth the effort.  */
  if (single_pred_p (dest)
      && ! phi_nodes (dest)
      && dest != EXIT_BLOCK_PTR)
    {
      *bsi = bsi_start (dest);
      if (bsi_end_p (*bsi))
	return true;

      /* Make sure we insert after any leading labels.  */
      tmp = bsi_stmt (*bsi);
      while (TREE_CODE (tmp) == LABEL_EXPR)
	{
	  bsi_next (bsi);
	  if (bsi_end_p (*bsi))
	    break;
	  tmp = bsi_stmt (*bsi);
	}

      /* Block contains only labels: insert after the last one.  */
      if (bsi_end_p (*bsi))
	{
	  *bsi = bsi_last (dest);
	  return true;
	}
      else
	return false;
    }

  /* If the source has one successor, the edge is not abnormal and
     the last statement does not end a basic block, insert there.
     Except for the entry block.  */
  src = e->src;
  if ((e->flags & EDGE_ABNORMAL) == 0
      && single_succ_p (src)
      && src != ENTRY_BLOCK_PTR)
    {
      *bsi = bsi_last (src);
      if (bsi_end_p (*bsi))
	return true;

      tmp = bsi_stmt (*bsi);
      if (!stmt_ends_bb_p (tmp))
	return true;

      /* Insert code just before returning the value.  We may need to decompose
	 the return in the case it contains non-trivial operand.  */
      if (TREE_CODE (tmp) == RETURN_EXPR)
	{
	  tree op = TREE_OPERAND (tmp, 0);
	  if (op && !is_gimple_val (op))
	    {
	      gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
	      bsi_insert_before (bsi, op, BSI_NEW_STMT);
	      TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
	    }
	  bsi_prev (bsi);
	  return true;
	}
    }

  /* Otherwise, create a new basic block, and split this edge.  Then
     retry with the single edge into the new block, which will hit the
     single-predecessor case above.  */
  dest = split_edge (e);
  if (new_bb)
    *new_bb = dest;
  e = single_pred_edge (dest);
  goto restart;
}
3038
3039
/* This routine will commit all pending edge insertions, creating any new
   basic blocks which are necessary.  */

void
bsi_commit_edge_inserts (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* The entry block's single successor edge is not visited by the
     FOR_EACH_BB loop below, so commit it explicitly first.  */
  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);

  FOR_EACH_BB (bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      bsi_commit_one_edge_insert (e, NULL);
}
3056
3057
3058 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
3059 to this block, otherwise set it to NULL. */
3060
3061 void
3062 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
3063 {
3064 if (new_bb)
3065 *new_bb = NULL;
3066 if (PENDING_STMT (e))
3067 {
3068 block_stmt_iterator bsi;
3069 tree stmt = PENDING_STMT (e);
3070
3071 PENDING_STMT (e) = NULL_TREE;
3072
3073 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
3074 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3075 else
3076 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3077 }
3078 }
3079
3080
/* Add STMT to the pending list of edge E.  No actual insertion is
   made until a call to bsi_commit_edge_inserts () is made.  */

void
bsi_insert_on_edge (edge e, tree stmt)
{
  append_to_statement_list (stmt, &PENDING_STMT (e));
}
3089
/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
   block has to be created, it is returned; otherwise NULL.  */

basic_block
bsi_insert_on_edge_immediate (edge e, tree stmt)
{
  block_stmt_iterator bsi;
  basic_block new_bb = NULL;

  /* Mixing immediate insertion with pending statements on the same
     edge is not supported.  */
  gcc_assert (!PENDING_STMT (e));

  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
  else
    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);

  return new_bb;
}
3108
3109 /*---------------------------------------------------------------------------
3110 Tree specific functions for CFG manipulation
3111 ---------------------------------------------------------------------------*/
3112
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  The
   queue is a TREE_LIST hanging off PENDING_STMT (OLD_EDGE), pairing
   each PHI result (TREE_PURPOSE) with its argument (TREE_VALUE).  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  tree var, phi;

  if (!PENDING_STMT (old_edge))
    return;

  /* Walk the queued list and the destination's PHI chain in lockstep;
     they were saved in the same order.  */
  for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
       var && phi;
       var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
    {
      tree result = TREE_PURPOSE (var);
      tree arg = TREE_VALUE (var);

      gcc_assert (result == PHI_RESULT (phi));

      add_phi_arg (phi, arg, new_edge);
    }

  PENDING_STMT (old_edge) = NULL;
}
3137
3138 /* Returns the basic block after which the new basic block created
3139 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3140 near its "logical" location. This is of most help to humans looking
3141 at debugging dumps. */
3142
3143 static basic_block
3144 split_edge_bb_loc (edge edge_in)
3145 {
3146 basic_block dest = edge_in->dest;
3147
3148 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3149 return edge_in->src;
3150 else
3151 return dest->prev_bb;
3152 }
3153
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
tree_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  /* The new block inherits the edge's execution frequency and count;
     its single outgoing fallthru edge to DEST is always taken.  */
  new_bb = create_empty_bb (after_bb);
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Retarget EDGE_IN at the new block, then move any queued PHI
     arguments for DEST over to the new edge.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
3183
3184
/* Return true when BB has label LABEL in it.  Only the leading run of
   LABEL_EXPRs is examined; the scan stops at the first non-label
   statement, relying on labels being grouped at the block start.  */

static bool
has_label_p (basic_block bb, tree label)
{
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);

      if (TREE_CODE (stmt) != LABEL_EXPR)
	return false;
      if (LABEL_EXPR_LABEL (stmt) == label)
	return true;
    }
  return false;
}
3203
3204
/* Callback for walk_tree, check that all elements with address taken are
   properly noticed as such.  The DATA is an int* that is 1 if TP was seen
   inside a PHI node.  Returns the offending subtree on error (after
   reporting it with error ()), or NULL to continue walking.  */

static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;
  bool in_phi = (data != NULL);

  /* Types are not part of the GIMPLE expression being verified.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      x = TREE_OPERAND (t, 0);
      if (TREE_CODE (x) == BIT_FIELD_REF
	  && is_gimple_reg (TREE_OPERAND (x, 0)))
	{
	  error ("GIMPLE register modified with BIT_FIELD_REF");
	  return t;
	}
      break;

    case ADDR_EXPR:
      {
	bool old_invariant;
	bool old_constant;
	bool old_side_effects;
	bool new_invariant;
	bool new_constant;
	bool new_side_effects;

	/* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
	   dead PHIs that take the address of something.  But if the PHI
	   result is dead, the fact that it takes the address of anything
	   is irrelevant.  Because we can not tell from here if a PHI result
	   is dead, we just skip this check for PHIs altogether.  This means
	   we may be missing "valid" checks, but what can you do?
	   This was PR19217.  */
	if (in_phi)
	  break;

	/* Verify that the ADDR_EXPR's invariant/constant/side-effect
	   flags are up to date by recomputing them and comparing.  */
	old_invariant = TREE_INVARIANT (t);
	old_constant = TREE_CONSTANT (t);
	old_side_effects = TREE_SIDE_EFFECTS (t);

	recompute_tree_invariant_for_addr_expr (t);
	new_invariant = TREE_INVARIANT (t);
	new_side_effects = TREE_SIDE_EFFECTS (t);
	new_constant = TREE_CONSTANT (t);

	if (old_invariant != new_invariant)
	  {
	    error ("invariant not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	if (old_constant != new_constant)
	  {
	    error ("constant not recomputed when ADDR_EXPR changed");
	    return t;
	  }
	if (old_side_effects != new_side_effects)
	  {
	    error ("side effects not recomputed when ADDR_EXPR changed");
	    return t;
	  }

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
	  return NULL;
	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }
	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
	{
	  error ("non-boolean used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF)
	    {
	      CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
	      CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
3429
3430
/* Verify STMT, return true if STMT is not in GIMPLE form.
   LAST_IN_BLOCK is true when STMT is the last statement of its basic
   block; only then may a statement that can throw internally appear.
   TODO: Implement type checking.  */

static bool
verify_stmt (tree stmt, bool last_in_block)
{
  tree addr;

  if (OMP_DIRECTIVE_P (stmt))
    {
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;
    }

  if (!is_gimple_stmt (stmt))
    {
      error ("is not a valid GIMPLE statement");
      goto fail;
    }

  /* Walk the whole statement; verify_expr returns the offending
     subtree, if any.  */
  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
  if (addr)
    {
      debug_generic_stmt (addr);
      return true;
    }

  /* If the statement is marked as part of an EH region, then it is
     expected that the statement could throw.  Verify that when we
     have optimizations that simplify statements such that we prove
     that they cannot throw, that we update other data structures
     to match.  */
  if (lookup_stmt_eh_region (stmt) >= 0)
    {
      if (!tree_could_throw_p (stmt))
	{
	  error ("statement marked for throw, but doesn%'t");
	  goto fail;
	}
      /* An internal throw transfers control away, so such a statement
	 must end its basic block.  */
      if (!last_in_block && tree_can_throw_internal (stmt))
	{
	  error ("statement marked for throw in middle of block");
	  goto fail;
	}
    }

  return false;

 fail:
  debug_generic_stmt (stmt);
  return true;
}
3488
3489
3490 /* Return true when the T can be shared. */
3491
3492 static bool
3493 tree_node_can_be_shared (tree t)
3494 {
3495 if (IS_TYPE_OR_DECL_P (t)
3496 || is_gimple_min_invariant (t)
3497 || TREE_CODE (t) == SSA_NAME
3498 || t == error_mark_node
3499 || TREE_CODE (t) == IDENTIFIER_NODE)
3500 return true;
3501
3502 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3503 return true;
3504
3505 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3506 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
3507 || TREE_CODE (t) == COMPONENT_REF
3508 || TREE_CODE (t) == REALPART_EXPR
3509 || TREE_CODE (t) == IMAGPART_EXPR)
3510 t = TREE_OPERAND (t, 0);
3511
3512 if (DECL_P (t))
3513 return true;
3514
3515 return false;
3516 }
3517
3518
3519 /* Called via walk_trees. Verify tree sharing. */
3520
3521 static tree
3522 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3523 {
3524 htab_t htab = (htab_t) data;
3525 void **slot;
3526
3527 if (tree_node_can_be_shared (*tp))
3528 {
3529 *walk_subtrees = false;
3530 return NULL;
3531 }
3532
3533 slot = htab_find_slot (htab, *tp, INSERT);
3534 if (*slot)
3535 return (tree) *slot;
3536 *slot = *tp;
3537
3538 return NULL;
3539 }
3540
3541
/* Verify the GIMPLE statement chain.  Checks every PHI node and
   statement in every basic block for GIMPLE validity, correct
   bb_for_stmt back-pointers and correct tree-node sharing.  Aborts
   with an internal error if any check fails.  */

void
verify_stmts (void)
{
  basic_block bb;
  block_stmt_iterator bsi;
  bool err = false;
  htab_t htab;
  tree addr;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* Hash table of all non-shareable nodes seen so far; used by
     verify_node_sharing to detect incorrect sharing.  */
  htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);

  FOR_EACH_BB (bb)
    {
      tree phi;
      int i;

      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  int phi_num_args = PHI_NUM_ARGS (phi);

	  if (bb_for_stmt (phi) != bb)
	    {
	      error ("bb_for_stmt (phi) is set to a wrong basic block");
	      err |= true;
	    }

	  for (i = 0; i < phi_num_args; i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      tree addr;

	      /* Addressable variables do have SSA_NAMEs but they
		 are not considered gimple values.  */
	      if (TREE_CODE (t) != SSA_NAME
		  && TREE_CODE (t) != FUNCTION_DECL
		  && !is_gimple_val (t))
		{
		  error ("PHI def is not a GIMPLE value");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (t);
		  err |= true;
		}

	      /* NOTE(review): the non-NULL data argument (void *) 1
		 presumably tells verify_expr this is a PHI argument —
		 confirm against verify_expr's handling of DATA.  */
	      addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
	      if (addr)
		{
		  debug_generic_stmt (addr);
		  err |= true;
		}

	      addr = walk_tree (&t, verify_node_sharing, htab, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_stmt (phi);
		  debug_generic_stmt (addr);
		  err |= true;
		}
	    }
	}

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
	{
	  tree stmt = bsi_stmt (bsi);

	  if (bb_for_stmt (stmt) != bb)
	    {
	      error ("bb_for_stmt (stmt) is set to a wrong basic block");
	      err |= true;
	    }

	  /* Advance the iterator before verifying so we can tell
	     verify_stmt whether STMT is the last in the block.  */
	  bsi_next (&bsi);
	  err |= verify_stmt (stmt, bsi_end_p (bsi));
	  addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_stmt (stmt);
	      debug_generic_stmt (addr);
	      err |= true;
	    }
	}
    }

  if (err)
    internal_error ("verify_stmts failed");

  htab_delete (htab);
  timevar_pop (TV_TREE_STMT_VERIFY);
}
3635
3636
/* Verifies that the flow information is OK.  Returns nonzero on any
   inconsistency.  Checks: entry/exit blocks have no statement lists,
   no fallthru edge reaches the exit, labels appear only (and
   correctly) at block starts, no control flow occurs mid-block, and
   the outgoing edge flags are consistent with each block's last
   statement.  */

static int
tree_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  block_stmt_iterator bsi;
  tree stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR->stmt_list)
    {
      error ("ENTRY_BLOCK has a statement list associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->stmt_list)
    {
      error ("EXIT_BLOCK has a statement list associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL_TREE;

      /* Skip labels on the start of basic block.  */
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree prev_stmt = stmt;

	  stmt = bsi_stmt (bsi);

	  if (TREE_CODE (stmt) != LABEL_EXPR)
	    break;

	  /* A nonlocal label must be first in the label sequence.  */
	  if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label-to-block map must agree with where the label
	     actually sits.  */
	  if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (LABEL_EXPR_LABEL (stmt))
	      != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);

	  /* A control statement seen on a previous iteration means
	     control flow did not end the block — error.  */
	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels were all consumed by the previous loop.  */
	  if (TREE_CODE (stmt) == LABEL_EXPR)
	    {
	      error ("label ");
	      print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      bsi = bsi_last (bb);
      if (bsi_end_p (bsi))
	continue;

      stmt = bsi_stmt (bsi);

      err |= verify_eh_edges (stmt);

      /* A control statement never has a fallthru successor.  */
      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (TREE_CODE (stmt) != COND_EXPR)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-COND_EXPR in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Per-statement-kind edge checks on the block's last statement.  */
      switch (TREE_CODE (stmt))
	{
	case COND_EXPR:
	  {
	    edge true_edge;
	    edge false_edge;
	    /* A COND_EXPR ending a block must have bare GOTO arms.  */
	    if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
		|| TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
	      {
		error ("structured COND_EXPR at the end of bb %d", bb->index);
		err = 1;
	      }

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    /* Exactly one true and one false edge, neither fallthru
	       nor abnormal, and no third successor.  */
	    if (!true_edge || !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }

	    if (!has_label_p (true_edge->dest,
			      GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
	      {
		error ("%<then%> label does not match edge at end of bb %d",
		       bb->index);
		err = 1;
	      }

	    if (!has_label_p (false_edge->dest,
			      GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
	      {
		error ("%<else%> label does not match edge at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GOTO_EXPR:
	  /* Simple gotos are represented implicitly by edges, so an
	     explicit one at the end of a block is a bug.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case RETURN_EXPR:
	  /* A return has exactly one plain successor edge, to exit.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case SWITCH_EXPR:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;
	    tree vec;

	    vec = SWITCH_LABELS (stmt);
	    n = TREE_VEC_LENGTH (vec);

	    /* Mark all the destination basic blocks.  aux == 1 means
	       "targeted by some case label".  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
		basic_block label_bb = label_to_block (lab);

		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  The last vector
	       element is expected to be the default and is skipped.  */
	    prev = TREE_VEC_ELT (vec, 0);
	    for (i = 1; i < n - 1; ++i)
	      {
		tree c = TREE_VEC_ELT (vec, i);
		if (! CASE_LOW (c))
		  {
		    error ("found default case not at end of case vector");
		    err = 1;
		    continue;
		  }
		if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
	      {
		error ("no default case found at end of case vector");
		err = 1;
	      }

	    /* Every successor edge must go to a marked block.  aux is
	       promoted to 2 when a matching edge is found.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }
		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them: any block still marked 1
	       was targeted by a case label but has no edge.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i",
			   bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  /* Fall through to the empty default.  */

	default: ;
	}
    }

  if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
3946
3947
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The forwarder (FALLTHRU->src) keeps the original
   phi nodes; its destination BB gets fresh phi nodes fed by the
   forwarder's results plus the arguments pending on the other
   incoming edges.  */

static void
tree_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree phi, new_phi, var;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* Nothing to do unless other edges were redirected to BB.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new phi nodes at the
     start of BB.  */
  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
    {
      var = PHI_RESULT (phi);
      new_phi = create_phi_node (var, bb);
      SSA_NAME_DEF_STMT (var) = new_phi;
      /* Give the forwarder's phi a fresh result name and feed it into
	 BB's new phi along FALLTHRU.  */
      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
    }

  /* Ensure that the PHI node chain is in the same order.  */
  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
3988
3989
3990 /* Return a non-special label in the head of basic block BLOCK.
3991 Create one if it doesn't exist. */
3992
3993 tree
3994 tree_block_label (basic_block bb)
3995 {
3996 block_stmt_iterator i, s = bsi_start (bb);
3997 bool first = true;
3998 tree label, stmt;
3999
4000 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
4001 {
4002 stmt = bsi_stmt (i);
4003 if (TREE_CODE (stmt) != LABEL_EXPR)
4004 break;
4005 label = LABEL_EXPR_LABEL (stmt);
4006 if (!DECL_NONLOCAL (label))
4007 {
4008 if (!first)
4009 bsi_move_before (&i, &s);
4010 return label;
4011 }
4012 }
4013
4014 label = create_artificial_label ();
4015 stmt = build1 (LABEL_EXPR, void_type_node, label);
4016 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
4017 return label;
4018 }
4019
4020
4021 /* Attempt to perform edge redirection by replacing a possibly complex
4022 jump instruction by a goto or by removing the jump completely.
4023 This can apply only if all edges now point to the same block. The
4024 parameters and return values are equivalent to
4025 redirect_edge_and_branch. */
4026
4027 static edge
4028 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
4029 {
4030 basic_block src = e->src;
4031 block_stmt_iterator b;
4032 tree stmt;
4033
4034 /* We can replace or remove a complex jump only when we have exactly
4035 two edges. */
4036 if (EDGE_COUNT (src->succs) != 2
4037 /* Verify that all targets will be TARGET. Specifically, the
4038 edge that is not E must also go to TARGET. */
4039 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4040 return NULL;
4041
4042 b = bsi_last (src);
4043 if (bsi_end_p (b))
4044 return NULL;
4045 stmt = bsi_stmt (b);
4046
4047 if (TREE_CODE (stmt) == COND_EXPR
4048 || TREE_CODE (stmt) == SWITCH_EXPR)
4049 {
4050 bsi_remove (&b, true);
4051 e = ssa_redirect_edge (e, target);
4052 e->flags = EDGE_FALLTHRU;
4053 return e;
4054 }
4055
4056 return NULL;
4057 }
4058
4059
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Adjusts the control
   statement at the end of E->src (COND_EXPR arm, SWITCH_EXPR case
   labels, or removal of a RETURN_EXPR) as needed.  */

static edge
tree_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  block_stmt_iterator bsi;
  edge ret;
  tree label, stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Try the cheap route first: remove the jump entirely when every
     successor would end up in DEST anyway.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
    return ret;

  if (e->dest == dest)
    return NULL;

  label = tree_block_label (dest);

  bsi = bsi_last (bb);
  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case COND_EXPR:
      /* Retarget the GOTO arm corresponding to E.  */
      stmt = (e->flags & EDGE_TRUE_VALUE
	      ? COND_EXPR_THEN (stmt)
	      : COND_EXPR_ELSE (stmt));
      GOTO_DESTINATION (stmt) = label;
      break;

    case GOTO_EXPR:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      {
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's chain at the new label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = TREE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  Splice
	       E's whole chain onto the front of E2's chain.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		TREE_CHAIN (last) = TREE_CHAIN (cases2);
		TREE_CHAIN (cases2) = first;
	      }
	  }
	else
	  {
	    /* No cached chain: scan the full case vector for labels
	       going to the old destination.  */
	    tree vec = SWITCH_LABELS (stmt);
	    size_t i, n = TREE_VEC_LENGTH (vec);

	    for (i = 0; i < n; i++)
	      {
		tree elt = TREE_VEC_ELT (vec, i);

		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }

	break;
      }

    case RETURN_EXPR:
      /* Removing the RETURN_EXPR turns E into a plain fallthru.  */
      bsi_remove (&bsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
4165
4166
4167 /* Simple wrapper, as we can always redirect fallthru edges. */
4168
4169 static basic_block
4170 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4171 {
4172 e = tree_redirect_edge_and_branch (e, dest);
4173 gcc_assert (e);
4174
4175 return NULL;
4176 }
4177
4178
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the new basic block that receives everything after the
   split point.  */

static basic_block
tree_split_block (basic_block bb, void *stmt)
{
  block_stmt_iterator bsi;
  tree_stmt_iterator tsi_tgt;
  tree act;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label degenerates to splitting after the labels.  */
  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
    stmt = NULL;

  /* Move everything from BSI to the new basic block.  Advance BSI to
     the first statement that goes into NEW_BB.  */
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      act = bsi_stmt (bsi);
      /* Labels always stay in BB.  */
      if (TREE_CODE (act) == LABEL_EXPR)
	continue;

      if (!stmt)
	break;

      if (stmt == act)
	{
	  bsi_next (&bsi);
	  break;
	}
    }

  /* Split point is at the end of BB: NEW_BB stays empty.  */
  if (bsi_end_p (bsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  new_bb->stmt_list = tsi_split_statement_list_before (&bsi.tsi);
  for (tsi_tgt = tsi_start (new_bb->stmt_list);
       !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
    change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);

  return new_bb;
}
4234
4235
4236 /* Moves basic block BB after block AFTER. */
4237
4238 static bool
4239 tree_move_block_after (basic_block bb, basic_block after)
4240 {
4241 if (bb->prev_bb == after)
4242 return true;
4243
4244 unlink_block (bb);
4245 link_block (bb, after);
4246
4247 return true;
4248 }
4249
4250
4251 /* Return true if basic_block can be duplicated. */
4252
4253 static bool
4254 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4255 {
4256 return true;
4257 }
4258
4259
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  Statement copies get fresh names for their
   definitions and inherit BB's EH region assignments; PHI arguments
   are left for the caller to fill in once the edges exist.  */

static basic_block
tree_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  block_stmt_iterator bsi, bsi_tgt;
  tree phi;

  /* Place the copy just before the exit block.  */
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
    {
      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
    }

  /* Keep the chain of PHI nodes in the same order so that they can be
     updated by ssa_redirect_edge.  */
  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));

  bsi_tgt = bsi_start (new_bb);
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree stmt, copy;
      int region;

      stmt = bsi_stmt (bsi);
      /* Labels are not duplicated.  */
      if (TREE_CODE (stmt) == LABEL_EXPR)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = unshare_expr (stmt);
      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
      copy_virtual_operands (copy, stmt);
      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	add_stmt_to_eh_region (copy, region);

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
4313
4314
4315 /* Basic block BB_COPY was created by code duplication. Add phi node
4316 arguments for edges going out of BB_COPY. The blocks that were
4317 duplicated have BB_DUPLICATED set. */
4318
4319 void
4320 add_phi_args_after_copy_bb (basic_block bb_copy)
4321 {
4322 basic_block bb, dest;
4323 edge e, e_copy;
4324 edge_iterator ei;
4325 tree phi, phi_copy, phi_next, def;
4326
4327 bb = get_bb_original (bb_copy);
4328
4329 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4330 {
4331 if (!phi_nodes (e_copy->dest))
4332 continue;
4333
4334 if (e_copy->dest->flags & BB_DUPLICATED)
4335 dest = get_bb_original (e_copy->dest);
4336 else
4337 dest = e_copy->dest;
4338
4339 e = find_edge (bb, dest);
4340 if (!e)
4341 {
4342 /* During loop unrolling the target of the latch edge is copied.
4343 In this case we are not looking for edge to dest, but to
4344 duplicated block whose original was dest. */
4345 FOR_EACH_EDGE (e, ei, bb->succs)
4346 if ((e->dest->flags & BB_DUPLICATED)
4347 && get_bb_original (e->dest) == dest)
4348 break;
4349
4350 gcc_assert (e != NULL);
4351 }
4352
4353 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4354 phi;
4355 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4356 {
4357 phi_next = PHI_CHAIN (phi);
4358 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4359 add_phi_arg (phi_copy, def, e_copy);
4360 }
4361 }
4362 }
4363
4364 /* Blocks in REGION_COPY array of length N_REGION were created by
4365 duplication of basic blocks. Add phi node arguments for edges
4366 going from these blocks. */
4367
4368 void
4369 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4370 {
4371 unsigned i;
4372
4373 for (i = 0; i < n_region; i++)
4374 region_copy[i]->flags |= BB_DUPLICATED;
4375
4376 for (i = 0; i < n_region; i++)
4377 add_phi_args_after_copy_bb (region_copy[i]);
4378
4379 for (i = 0; i < n_region; i++)
4380 region_copy[i]->flags &= ~BB_DUPLICATED;
4381 }
4382
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  SSA form, dominance and loop information
   is updated.  The new basic blocks are stored to REGION_COPY in the same
   order as they had in REGION, provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
tree_duplicate_sese_region (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i, n_doms;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  basic_block *doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     missuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear as the region entry.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  loop->copy = loop;

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      loop->copy = loop->outer;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p ());

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = XNEWVEC (basic_block, n_basic_blocks);
  initialize_original_copy_tables ();

  n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);

  /* Collect either profile counts or estimated frequencies, whichever
     is available, to split between original and copy.  */
  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry));
  /* Scale the original region down by the entry probability and the
     copy up by it, keeping the overall profile consistent.  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
  doms[n_doms++] = get_bb_original (entry->dest);
  iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
  free (doms);

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region);

  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
4529
4530 /*
4531 DEF_VEC_P(basic_block);
4532 DEF_VEC_ALLOC_P(basic_block,heap);
4533 */
4534
4535 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
4536 adding blocks when the dominator traversal reaches EXIT. This
4537 function silently assumes that ENTRY strictly dominates EXIT. */
4538
4539 static void
4540 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
4541 VEC(basic_block,heap) **bbs_p)
4542 {
4543 basic_block son;
4544
4545 for (son = first_dom_son (CDI_DOMINATORS, entry);
4546 son;
4547 son = next_dom_son (CDI_DOMINATORS, son))
4548 {
4549 VEC_safe_push (basic_block, heap, *bbs_p, son);
4550 if (son != exit)
4551 gather_blocks_in_sese_region (son, exit, bbs_p);
4552 }
4553 }
4554
4555
/* Auxiliary data passed to move_stmt_r while relocating statements
   from one function to another (see move_block_to_fn).  */

struct move_stmt_d
{
  tree block;		   /* Lexical block to set as TREE_BLOCK.  */
  tree from_context;	   /* FUNCTION_DECL statements move out of.  */
  tree to_context;	   /* FUNCTION_DECL statements move into.  */
  bitmap vars_to_remove;   /* DECL_UIDs of variables to drop from the
			      source function's unexpanded var list.  */
  htab_t new_label_map;	   /* Maps old LABEL_DECLs to replacements.  */
  bool remap_decls_p;	   /* True when local decls should be remapped
			      into TO_CONTEXT.  */
};
4565
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP and change the DECL_CONTEXT of every local
   variable referenced in *TP.  DATA points to a struct move_stmt_d
   holding the source/destination contexts and bookkeeping tables.
   Called via walk_tree; always returns NULL_TREE (walk to the end).  */

static tree
move_stmt_r (tree *tp, int *walk_subtrees, void *data)
{
  struct move_stmt_d *p = (struct move_stmt_d *) data;
  tree t = *tp;

  /* Re-parent expressions into the destination lexical block.  */
  if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
    TREE_BLOCK (t) = p->block;

  if (OMP_DIRECTIVE_P (t)
      && TREE_CODE (t) != OMP_RETURN
      && TREE_CODE (t) != OMP_CONTINUE)
    {
      /* Do not remap variables inside OMP directives.  Variables
	 referenced in clauses and directive header belong to the
	 parent function and should not be moved into the child
	 function.  */
      bool save_remap_decls_p = p->remap_decls_p;
      p->remap_decls_p = false;
      *walk_subtrees = 0;

      /* Still walk the directive body, with decl remapping off.  */
      walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);

      p->remap_decls_p = save_remap_decls_p;
    }
  else if (DECL_P (t) && DECL_CONTEXT (t) == p->from_context)
    {
      if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Substitute the label's duplicate, if one was mapped.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.from = t;
	      out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  DECL_CONTEXT (t) = p->to_context;

	  if (TREE_CODE (t) == VAR_DECL)
	    {
	      /* Register the variable with the destination function.  */
	      struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
	      f->unexpanded_var_list
		= tree_cons (0, t, f->unexpanded_var_list);

	      /* Mark T to be removed from the original function,
		 otherwise it will be given a DECL_RTL when the
		 original function is expanded.  */
	      bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
	    }
	}
    }
  else if (TYPE_P (t))
    /* Types are shared between functions; do not descend.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
4632
4633
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   NEW_LABEL_MAP maps original LABEL_DECLs of duplicated EH regions to
   their replacements (may be NULL); EH_OFFSET is added to each
   statement's EH region number to translate it into DEST_CFUN's EH
   region numbering.

   On exit, local variables that need to be removed from
   CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  block_stmt_iterator si;
  struct move_stmt_d d;
  unsigned old_len, new_len;
  basic_block *addr;

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  Only
     outgoing edges are counted here; each one leaves CFUN's CFG and
     enters DEST_CFUN's.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  /* NOTE(review): x_last_basic_block tracks the largest index seen
     (not largest + 1), and BB is stored below at slot
     x_last_basic_block rather than bb->index -- verify these always
     coincide for every block moved by the caller.  */
  if (bb->index > cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index;

  old_len = VEC_length (basic_block, cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by roughly 25% and zero-fill the new tail.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      VEC_safe_grow (basic_block, gc, cfg->x_basic_block_info, new_len);
      addr = VEC_address (basic_block, cfg->x_basic_block_info);
      memset (&addr[old_len], 0, sizeof (basic_block) * (new_len - old_len));
    }

  VEC_replace (basic_block, cfg->x_basic_block_info,
               cfg->x_last_basic_block, bb);

  /* The statements in BB need to be associated with a new TREE_BLOCK.
     Labels need to be associated with a new label-to-block map.  */
  memset (&d, 0, sizeof (d));
  d.vars_to_remove = vars_to_remove;

  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
    {
      tree stmt = bsi_stmt (si);
      int region;

      d.from_context = cfun->decl;
      d.to_context = dest_cfun->decl;
      d.remap_decls_p = true;
      d.new_label_map = new_label_map;
      if (TREE_BLOCK (stmt))
	d.block = DECL_INITIAL (dest_cfun->decl);

      walk_tree (&stmt, move_stmt_r, &d, NULL);

      if (TREE_CODE (stmt) == LABEL_EXPR)
	{
	  /* Transfer the label from CFUN's label-to-block map to
	     DEST_CFUN's, growing the destination map if needed.  */
	  tree label = LABEL_EXPR_LABEL (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2;
	      VEC_safe_grow (basic_block, gc, cfg->x_label_to_block_map,
			     new_len);
	      addr = VEC_address (basic_block, cfg->x_label_to_block_map);
	      memset (&addr[old_len], 0,
		      sizeof (basic_block) * (new_len - old_len));
	    }

	  VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
	  VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->last_label_uid)
	    dest_cfun->last_label_uid = uid + 1;
	}
      else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
	/* RESX operands carry the EH region number directly; shift it
	   into DEST_CFUN's region numbering.  */
	TREE_OPERAND (stmt, 0) =
	  build_int_cst (NULL_TREE,
			 TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
			 + eh_offset);

      region = lookup_stmt_eh_region (stmt);
      if (region >= 0)
	{
	  /* Re-register the statement in the destination function's
	     EH table under the translated region number.  */
	  add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
	  remove_stmt_from_eh_region (stmt);
	}
    }
}
4749
4750 /* Examine the statements in BB (which is in SRC_CFUN); find and return
4751 the outermost EH region. Use REGION as the incoming base EH region. */
4752
4753 static int
4754 find_outermost_region_in_block (struct function *src_cfun,
4755 basic_block bb, int region)
4756 {
4757 block_stmt_iterator si;
4758
4759 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
4760 {
4761 tree stmt = bsi_stmt (si);
4762 int stmt_region;
4763
4764 if (TREE_CODE (stmt) == RESX_EXPR)
4765 stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
4766 else
4767 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
4768 if (stmt_region > 0)
4769 {
4770 if (region < 0)
4771 region = stmt_region;
4772 else if (stmt_region != region)
4773 {
4774 region = eh_region_outermost (src_cfun, stmt_region, region);
4775 gcc_assert (region != -1);
4776 }
4777 }
4778 }
4779
4780 return region;
4781 }
4782
4783 static tree
4784 new_label_mapper (tree decl, void *data)
4785 {
4786 htab_t hash = (htab_t) data;
4787 struct tree_map *m;
4788 void **slot;
4789
4790 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
4791
4792 m = xmalloc (sizeof (struct tree_map));
4793 m->hash = DECL_UID (decl);
4794 m->from = decl;
4795 m->to = create_artificial_label ();
4796 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
4797
4798 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
4799 gcc_assert (*slot == NULL);
4800
4801 *slot = m;
4802
4803 return m->to;
4804 }
4805
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
		        basic_block exit_bb)
{
  VEC(basic_block,heap) *bbs;
  basic_block after, bb, *entry_pred, *exit_succ;
  struct function *saved_cfun;
  int *entry_flag, *exit_flag, eh_offset;
  unsigned i, num_entry_edges, num_exit_edges;
  edge e;
  edge_iterator ei;
  bitmap vars_to_remove;
  htab_t new_label_map;

  saved_cfun = cfun;

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
              && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  bbs = NULL;
  VEC_safe_push (basic_block, heap, bbs, entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
  entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
  i = 0;
  /* remove_edge shrinks the vector, so the iterator is restarted from
     slot 0 each time rather than advanced.  */
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = (basic_block *) xcalloc (num_exit_edges,
					   sizeof (basic_block));
      exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  cfun = dest_cfun;

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_offset = 0;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      int region = -1;

      /* Find the single outermost EH region covering the whole region
	 so only that subtree of regions needs duplicating.  */
      for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != -1)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
					    new_label_map, region, 0);
	}
    }

  cfun = saved_cfun;

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (VEC_length (basic_block, bbs) >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  vars_to_remove = BITMAP_ALLOC (NULL);
  for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
		        new_label_map, eh_offset);
      after = bb;
    }

  if (new_label_map)
    htab_delete (new_label_map);

  /* Remove the variables marked in VARS_TO_REMOVE from
     CFUN->UNEXPANDED_VAR_LIST.  Otherwise, they will be given a
     DECL_RTL in the context of CFUN.  */
  if (!bitmap_empty_p (vars_to_remove))
    {
      tree *p;

      for (p = &cfun->unexpanded_var_list; *p; )
	{
	  tree var = TREE_VALUE (*p);
	  if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
	    {
	      /* Unlink this element; P stays put so the element now
		 in *P is examined on the next iteration.  */
	      *p = TREE_CHAIN (*p);
	      continue;
	    }

	  p = &TREE_CHAIN (*p);
	}
    }

  BITMAP_FREE (vars_to_remove);

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation function get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  cfun = dest_cfun;
  make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
  cfun = saved_cfun;

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  for (i = 0; i < num_entry_edges; i++)
    make_edge (entry_pred[i], bb, entry_flag[i]);

  for (i = 0; i < num_exit_edges; i++)
    make_edge (bb, exit_succ[i], exit_flag[i]);

  if (exit_bb)
    {
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_flag);
  free (entry_pred);
  /* Dominance information is stale on both sides of the move.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  VEC_free (basic_block, heap, bbs);

  return bb;
}
4989
4990
/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).
   Depending on how far FN has been compiled, this produces either a
   raw node dump, a CFG-based dump, or a plain tree dump of the saved
   function body.  */

void
dump_function_to_file (tree fn, FILE *file, int flags)
{
  tree arg, vars, var;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  struct function *saved_cfun;

  /* Print the signature: name followed by the argument list.  */
  fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));

  arg = DECL_ARGUMENTS (fn);
  while (arg)
    {
      print_generic_expr (file, arg, dump_flags);
      if (TREE_CHAIN (arg))
	fprintf (file, ", ");
      arg = TREE_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_DETAILS)
    dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
  if (flags & TDF_RAW)
    {
      dump_node (fn, TDF_SLIM | flags, file);
      return;
    }

  /* Switch CFUN to point to FN.  */
  saved_cfun = cfun;
  cfun = DECL_STRUCT_FUNCTION (fn);

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
    {
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
	{
	  var = TREE_VALUE (vars);

	  print_generic_decl (file, var, flags);
	  fprintf (file, "\n");

	  any_var = true;
	}
    }

  if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
    {
      /* Make a CFG based dump.  */
      check_bb_profile (ENTRY_BLOCK_PTR, file);
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks)
	fprintf (file, "\n");

      FOR_EACH_BB (bb)
	dump_generic_bb (file, bb, 2, flags);

      fprintf (file, "}\n");
      check_bb_profile (EXIT_BLOCK_PTR, file);
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fn);

      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      /* The variables were already printed above; dump only
		 the bind's body, indented.  */
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  fprintf (file, "\n\n");

  /* Restore CFUN.  */
  cfun = saved_cfun;
}
5097
5098
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file, intended to be
   called from the debugger.  */

void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
5106
5107
5108 /* Pretty print of the loops intermediate representation. */
5109 static void print_loop (FILE *, struct loop *, int);
5110 static void print_pred_bbs (FILE *, basic_block bb);
5111 static void print_succ_bbs (FILE *, basic_block bb);
5112
5113
5114 /* Print on FILE the indexes for the predecessors of basic_block BB. */
5115
5116 static void
5117 print_pred_bbs (FILE *file, basic_block bb)
5118 {
5119 edge e;
5120 edge_iterator ei;
5121
5122 FOR_EACH_EDGE (e, ei, bb->preds)
5123 fprintf (file, "bb_%d ", e->src->index);
5124 }
5125
5126
5127 /* Print on FILE the indexes for the successors of basic_block BB. */
5128
5129 static void
5130 print_succ_bbs (FILE *file, basic_block bb)
5131 {
5132 edge e;
5133 edge_iterator ei;
5134
5135 FOR_EACH_EDGE (e, ei, bb->succs)
5136 fprintf (file, "bb_%d ", e->dest->index);
5137 }
5138
5139
5140 /* Pretty print LOOP on FILE, indented INDENT spaces. */
5141
5142 static void
5143 print_loop (FILE *file, struct loop *loop, int indent)
5144 {
5145 char *s_indent;
5146 basic_block bb;
5147
5148 if (loop == NULL)
5149 return;
5150
5151 s_indent = (char *) alloca ((size_t) indent + 1);
5152 memset ((void *) s_indent, ' ', (size_t) indent);
5153 s_indent[indent] = '\0';
5154
5155 /* Print the loop's header. */
5156 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
5157
5158 /* Print the loop's body. */
5159 fprintf (file, "%s{\n", s_indent);
5160 FOR_EACH_BB (bb)
5161 if (bb->loop_father == loop)
5162 {
5163 /* Print the basic_block's header. */
5164 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
5165 print_pred_bbs (file, bb);
5166 fprintf (file, "}, succs = {");
5167 print_succ_bbs (file, bb);
5168 fprintf (file, "})\n");
5169
5170 /* Print the basic_block's body. */
5171 fprintf (file, "%s {\n", s_indent);
5172 tree_dump_bb (bb, file, indent + 4);
5173 fprintf (file, "%s }\n", s_indent);
5174 }
5175
5176 print_loop (file, loop->inner, indent + 2);
5177 fprintf (file, "%s}\n", s_indent);
5178 print_loop (file, loop->next, indent);
5179 }
5180
5181
5182 /* Follow a CFG edge from the entry point of the program, and on entry
5183 of a loop, pretty print the loop structure on FILE. */
5184
5185 void
5186 print_loop_ir (FILE *file)
5187 {
5188 basic_block bb;
5189
5190 bb = BASIC_BLOCK (NUM_FIXED_BLOCKS);
5191 if (bb && bb->loop_father)
5192 print_loop (file, bb->loop_father, 0);
5193 }
5194
5195
/* Debugging loops structure at tree level.  Dumps the loop IR to
   stderr; intended to be called from the debugger.  */

void
debug_loop_ir (void)
{
  print_loop_ir (stderr);
}
5203
5204
5205 /* Return true if BB ends with a call, possibly followed by some
5206 instructions that must stay with the call. Return false,
5207 otherwise. */
5208
5209 static bool
5210 tree_block_ends_with_call_p (basic_block bb)
5211 {
5212 block_stmt_iterator bsi = bsi_last (bb);
5213 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
5214 }
5215
5216
5217 /* Return true if BB ends with a conditional branch. Return false,
5218 otherwise. */
5219
5220 static bool
5221 tree_block_ends_with_condjump_p (basic_block bb)
5222 {
5223 tree stmt = last_stmt (bb);
5224 return (stmt && TREE_CODE (stmt) == COND_EXPR);
5225 }
5226
5227
5228 /* Return true if we need to add fake edge to exit at statement T.
5229 Helper function for tree_flow_call_edges_add. */
5230
5231 static bool
5232 need_fake_edge_p (tree t)
5233 {
5234 tree call;
5235
5236 /* NORETURN and LONGJMP calls already have an edge to exit.
5237 CONST and PURE calls do not need one.
5238 We don't currently check for CONST and PURE here, although
5239 it would be a good idea, because those attributes are
5240 figured out from the RTL in mark_constant_function, and
5241 the counter incrementation code from -fprofile-arcs
5242 leads to different results from -fbranch-probabilities. */
5243 call = get_call_expr_in (t);
5244 if (call
5245 && !(call_expr_flags (call) & ECF_NORETURN))
5246 return true;
5247
5248 if (TREE_CODE (t) == ASM_EXPR
5249 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
5250 return true;
5251
5252 return false;
5253 }
5254
5255
/* Add fake edges to the function exit for any non constant and non
   noreturn calls, volatile inline assembly in the bitmap of blocks
   specified by BLOCKS or to the whole CFG if BLOCKS is zero.  Return
   the number of blocks that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
tree_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do for a CFG with only the entry and exit blocks.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      block_stmt_iterator bsi = bsi_last (bb);
      tree t = NULL_TREE;
      if (!bsi_end_p (bsi))
	t = bsi_stmt (bsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting the empty statement on the exit edge forces
		 a new block, separating the call from EXIT.  */
	      bsi_insert_on_edge (e, build_empty_stmt ());
	      bsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      block_stmt_iterator bsi;
      tree stmt, last_stmt;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Scan the block's statements from last to first; each one that
	 needs a fake edge gets the block split after it (except when
	 it is already the last statement).  */
      bsi = bsi_last (bb);
      if (!bsi_end_p (bsi))
	{
	  last_stmt = bsi_stmt (bsi);
	  do
	    {
	      stmt = bsi_stmt (bsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;
		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      bsi_prev (&bsi);
	    }
	  while (!bsi_end_p (bsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
5372
/* Purge dead abnormal call edges from basic block BB.  Also purges
   dead EH edges (via tree_purge_dead_eh_edges).  Return true if any
   edge was removed.  May free dominance information.  */

bool
tree_purge_dead_abnormal_call_edges (basic_block bb)
{
  bool changed = tree_purge_dead_eh_edges (bb);

  if (current_function_has_nonlocal_label)
    {
      tree stmt = last_stmt (bb);
      edge_iterator ei;
      edge e;

      /* If BB's last statement can no longer perform an abnormal
	 goto, every outgoing abnormal edge is dead.  */
      if (!(stmt && tree_can_make_abnormal_goto (stmt)))
	for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	  {
	    if (e->flags & EDGE_ABNORMAL)
	      {
		remove_edge (e);
		changed = true;
	      }
	    else
	      ei_next (&ei);
	  }

      /* See tree_purge_dead_eh_edges below.  */
      if (changed)
	free_dominance_info (CDI_DOMINATORS);
    }

  return changed;
}
5405
/* Purge dead EH edges from basic block BB.  Return true if any edge
   was removed.  If the last statement of BB can still throw
   internally, no edges are removed.  May free dominance info.  */

bool
tree_purge_dead_eh_edges (basic_block bb)
{
  bool changed = false;
  edge e;
  edge_iterator ei;
  tree stmt = last_stmt (bb);

  /* If the statement can still throw, all EH edges remain live.  */
  if (stmt && tree_can_throw_internal (stmt))
    return false;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (e->flags & EDGE_EH)
	{
	  remove_edge (e);
	  changed = true;
	}
      else
	ei_next (&ei);
    }

  /* Removal of dead EH edges might change dominators of not
     just immediate successors.  E.g. when bb1 is changed so that
     it no longer can throw and bb1->bb3 and bb1->bb4 are dead
     eh edges purged by this function in:
           0
	  / \
	 v   v
	 1-->2
        / \  |
       v   v |
       3-->4 |
        \    v
	 --->5
	     |
	     -
     idom(bb5) must be recomputed.  For now just free the dominance
     info.  */
  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  return changed;
}
5452
5453 bool
5454 tree_purge_all_dead_eh_edges (bitmap blocks)
5455 {
5456 bool changed = false;
5457 unsigned i;
5458 bitmap_iterator bi;
5459
5460 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
5461 {
5462 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
5463 }
5464
5465 return changed;
5466 }
5467
5468 /* This function is called whenever a new edge is created or
5469 redirected. */
5470
5471 static void
5472 tree_execute_on_growing_pred (edge e)
5473 {
5474 basic_block bb = e->dest;
5475
5476 if (phi_nodes (bb))
5477 reserve_phi_args_for_new_edge (bb);
5478 }
5479
5480 /* This function is called immediately before edge E is removed from
5481 the edge vector E->dest->preds. */
5482
5483 static void
5484 tree_execute_on_shrinking_pred (edge e)
5485 {
5486 if (phi_nodes (e->dest))
5487 remove_phi_args (e);
5488 }
5489
5490 /*---------------------------------------------------------------------------
5491 Helper functions for Loop versioning
5492 ---------------------------------------------------------------------------*/
5493
5494 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
5495 of 'first'. Both of them are dominated by 'new_head' basic block. When
5496 'new_head' was created by 'second's incoming edge it received phi arguments
5497 on the edge by split_edge(). Later, additional edge 'e' was created to
5498 connect 'new_head' and 'first'. Now this routine adds phi args on this
5499 additional edge 'e' that new_head to second edge received as part of edge
5500 splitting.
5501 */
5502
5503 static void
5504 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
5505 basic_block new_head, edge e)
5506 {
5507 tree phi1, phi2;
5508 edge e2 = find_edge (new_head, second);
5509
5510 /* Because NEW_HEAD has been created by splitting SECOND's incoming
5511 edge, we should always have an edge from NEW_HEAD to SECOND. */
5512 gcc_assert (e2 != NULL);
5513
5514 /* Browse all 'second' basic block phi nodes and add phi args to
5515 edge 'e' for 'first' head. PHI args are always in correct order. */
5516
5517 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
5518 phi2 && phi1;
5519 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
5520 {
5521 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
5522 add_phi_arg (phi1, def, e);
5523 }
5524 }
5525
5526 /* Adds a if else statement to COND_BB with condition COND_EXPR.
5527 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
5528 the destination of the ELSE part. */
5529 static void
5530 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
5531 basic_block cond_bb, void *cond_e)
5532 {
5533 block_stmt_iterator bsi;
5534 tree goto1 = NULL_TREE;
5535 tree goto2 = NULL_TREE;
5536 tree new_cond_expr = NULL_TREE;
5537 tree cond_expr = (tree) cond_e;
5538 edge e0;
5539
5540 /* Build new conditional expr */
5541 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
5542 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
5543 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
5544
5545 /* Add new cond in cond_bb. */
5546 bsi = bsi_start (cond_bb);
5547 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
5548 /* Adjust edges appropriately to connect new head with first head
5549 as well as second head. */
5550 e0 = single_succ_edge (cond_bb);
5551 e0->flags &= ~EDGE_FALLTHRU;
5552 e0->flags |= EDGE_FALSE_VALUE;
5553 }
5554
/* Table of CFG manipulation hooks for GIMPLE trees; see cfghooks.h
   for the meaning of each slot.  */
struct cfg_hooks tree_cfg_hooks = {
  "tree",			/* name */
  tree_verify_flow_info,	/* verify_flow_info */
  tree_dump_bb,			/* dump_bb */
  create_bb,			/* create_basic_block */
  tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
  remove_bb,			/* delete_basic_block */
  tree_split_block,		/* split_block */
  tree_move_block_after,	/* move_block_after */
  tree_can_merge_blocks_p,	/* can_merge_blocks_p */
  tree_merge_blocks,		/* merge_blocks */
  tree_predict_edge,		/* predict_edge */
  tree_predicted_by_p,		/* predicted_by_p */
  tree_can_duplicate_bb_p,	/* can_duplicate_block_p */
  tree_duplicate_bb,		/* duplicate_block */
  tree_split_edge,		/* split_edge */
  tree_make_forwarder_block,	/* make_forwarder_block */
  NULL,				/* tidy_fallthru_edge */
  tree_block_ends_with_call_p,	/* block_ends_with_call_p */
  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  tree_flow_call_edges_add,     /* flow_call_edges_add */
  tree_execute_on_growing_pred,	/* execute_on_growing_pred */
  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts		/* flush_pending_stmts */
};
5585
5586
5587 /* Split all critical edges. */
5588
5589 static unsigned int
5590 split_critical_edges (void)
5591 {
5592 basic_block bb;
5593 edge e;
5594 edge_iterator ei;
5595
5596 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
5597 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
5598 mappings around the calls to split_edge. */
5599 start_recording_case_labels ();
5600 FOR_ALL_BB (bb)
5601 {
5602 FOR_EACH_EDGE (e, ei, bb->succs)
5603 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
5604 {
5605 split_edge (e);
5606 }
5607 }
5608 end_recording_case_labels ();
5609 return 0;
5610 }
5611
/* Pass descriptor for the critical-edge splitting pass.  */
struct tree_opt_pass pass_split_crit_edges =
{
  "crited",                          /* name */
  NULL,                          /* gate */
  split_critical_edges,          /* execute */
  NULL,                          /* sub */
  NULL,                          /* next */
  0,                             /* static_pass_number */
  TV_TREE_SPLIT_EDGES,           /* tv_id */
  PROP_cfg,                      /* properties_required */
  PROP_no_crit_edges,            /* properties_provided */
  0,                             /* properties_destroyed */
  0,                             /* todo_flags_start */
  TODO_dump_func,                /* todo_flags_finish */
  0                              /* letter */
};
5628
5629 \f
5630 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5631 a temporary, make sure and register it to be renamed if necessary,
5632 and finally return the temporary. Put the statements to compute
5633 EXP before the current statement in BSI. */
5634
5635 tree
5636 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5637 {
5638 tree t, new_stmt, orig_stmt;
5639
5640 if (is_gimple_val (exp))
5641 return exp;
5642
5643 t = make_rename_temp (type, NULL);
5644 new_stmt = build2 (MODIFY_EXPR, type, t, exp);
5645
5646 orig_stmt = bsi_stmt (*bsi);
5647 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5648 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5649
5650 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5651 if (in_ssa_p)
5652 mark_new_vars_to_rename (new_stmt);
5653
5654 return t;
5655 }
5656
5657 /* Build a ternary operation and gimplify it. Emit code before BSI.
5658 Return the gimple_val holding the result. */
5659
5660 tree
5661 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5662 tree type, tree a, tree b, tree c)
5663 {
5664 tree ret;
5665
5666 ret = fold_build3 (code, type, a, b, c);
5667 STRIP_NOPS (ret);
5668
5669 return gimplify_val (bsi, type, ret);
5670 }
5671
5672 /* Build a binary operation and gimplify it. Emit code before BSI.
5673 Return the gimple_val holding the result. */
5674
5675 tree
5676 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5677 tree type, tree a, tree b)
5678 {
5679 tree ret;
5680
5681 ret = fold_build2 (code, type, a, b);
5682 STRIP_NOPS (ret);
5683
5684 return gimplify_val (bsi, type, ret);
5685 }
5686
5687 /* Build a unary operation and gimplify it. Emit code before BSI.
5688 Return the gimple_val holding the result. */
5689
5690 tree
5691 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5692 tree a)
5693 {
5694 tree ret;
5695
5696 ret = fold_build1 (code, type, a);
5697 STRIP_NOPS (ret);
5698
5699 return gimplify_val (bsi, type, ret);
5700 }
5701
5702
5703 \f
/* Emit return warnings.

   Two diagnostics are produced here, both driven by the predecessors of
   the exit block:

   1. A function marked noreturn (TREE_THIS_VOLATILE on the decl) whose
      exit block is reachable does, in fact, return — warn.
   2. A non-void function containing a plain "return;" reaches the end
      of the function without a value — warn under -Wreturn-type.

   The USE_MAPPED_LOCATION conditionals select between the two source
   location representations in use at this time (source_location vs.
   location_t *); both arms implement the same search-then-fallback
   logic.  */

static unsigned int
execute_warn_function_return (void)
{
#ifdef USE_MAPPED_LOCATION
  source_location location;
#else
  location_t *locus;
#endif
  tree last;
  edge e;
  edge_iterator ei;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
#ifdef USE_MAPPED_LOCATION
      location = UNKNOWN_LOCATION;
#else
      locus = NULL;
#endif
      /* Look for the first RETURN_EXPR carrying usable location info so
	 the warning can point at an actual return statement.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
#ifdef USE_MAPPED_LOCATION
	      && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
#else
	      && (locus = EXPR_LOCUS (last)) != NULL)
#endif
	    break;
	}
      /* No located return found: fall back to the end of the function.  */
#ifdef USE_MAPPED_LOCATION
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning (0, "%H%<noreturn%> function does return", &location);
#else
      if (!locus)
	locus = &cfun->function_end_locus;
      warning (0, "%H%<noreturn%> function does return", locus);
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  tree last = last_stmt (e->src);
	  if (TREE_CODE (last) == RETURN_EXPR
	      && TREE_OPERAND (last, 0) == NULL
	      && !TREE_NO_WARNING (last))
	    {
#ifdef USE_MAPPED_LOCATION
	      location = EXPR_LOCATION (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning (0, "%Hcontrol reaches end of non-void function", &location);
#else
	      locus = EXPR_LOCUS (last);
	      if (!locus)
		locus = &cfun->function_end_locus;
	      warning (0, "%Hcontrol reaches end of non-void function", locus);
#endif
	      /* Mark the decl so we warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
5781
5782
5783 /* Given a basic block B which ends with a conditional and has
5784 precisely two successors, determine which of the edges is taken if
5785 the conditional is true and which is taken if the conditional is
5786 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5787
5788 void
5789 extract_true_false_edges_from_block (basic_block b,
5790 edge *true_edge,
5791 edge *false_edge)
5792 {
5793 edge e = EDGE_SUCC (b, 0);
5794
5795 if (e->flags & EDGE_TRUE_VALUE)
5796 {
5797 *true_edge = e;
5798 *false_edge = EDGE_SUCC (b, 1);
5799 }
5800 else
5801 {
5802 *false_edge = e;
5803 *true_edge = EDGE_SUCC (b, 1);
5804 }
5805 }
5806
/* Pass descriptor for the return-related warnings above.  Unnamed
   (no dump file); requires only that the CFG has been built.  */

struct tree_opt_pass pass_warn_function_return =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_warn_function_return,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};
5823
5824 /* Emit noreturn warnings. */
5825
5826 static unsigned int
5827 execute_warn_function_noreturn (void)
5828 {
5829 if (warn_missing_noreturn
5830 && !TREE_THIS_VOLATILE (cfun->decl)
5831 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5832 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5833 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5834 "for attribute %<noreturn%>",
5835 cfun->decl);
5836 return 0;
5837 }
5838
/* Pass descriptor for the missing-noreturn suggestion above.  Unnamed
   (no dump file); requires only that the CFG has been built.  */

struct tree_opt_pass pass_warn_function_noreturn =
{
  NULL,                                 /* name */
  NULL,                                 /* gate */
  execute_warn_function_noreturn,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  PROP_cfg,                             /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
  0                                     /* letter */
};