re PR c++/24780 (ICE set_mem_attributes_minus_bitpos)
[gcc.git] gcc/tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "ggc.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
39 #include "timevar.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "toplev.h"
43 #include "except.h"
44 #include "cfgloop.h"
45 #include "cfglayout.h"
46 #include "hashtab.h"
47 #include "tree-ssa-propagate.h"
48
49 /* This file contains functions for building the Control Flow Graph (CFG)
50 for a function tree. */
51
52 /* Local declarations. */
53
54 /* Initial capacity for the basic block array. */
55 static const int initial_cfg_capacity = 20;
56
57 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
58 which use a particular edge. The CASE_LABEL_EXPRs are chained together
59 via their TREE_CHAIN field, which we clear after we're done with the
60 hash table to prevent problems with duplication of SWITCH_EXPRs.
61
62 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
63 update the case vector in response to edge redirections.
64
65 Right now this table is set up and torn down at key points in the
66 compilation process. It would be nice if we could make the table
67 more persistent. The key is getting notification of changes to
68 the CFG (particularly edge removal, creation and redirection). */
69
70 struct edge_to_cases_elt
71 {
72 /* The edge itself. Necessary for hashing and equality tests. */
73 edge e;
74
75 /* The case labels associated with this edge. We link these up via
76 their TREE_CHAIN field, then we wipe out the TREE_CHAIN fields
77 when we destroy the hash table. This prevents problems when copying
78 SWITCH_EXPRs. */
79 tree case_labels;
80 };
81
82 static htab_t edge_to_cases;
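/* Illustrative example (not taken from the original source): for a
   lowered switch such as

     switch (x) { case 1: case 2: goto <L1>; default: goto <L2>; }

   the edge from the switch block to the block holding <L1> would map
   to the chain "case 1 -> case 2" (linked through TREE_CHAIN), while
   the edge to <L2>'s block would map to the default CASE_LABEL_EXPR.  */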
83
84 /* CFG statistics. */
85 struct cfg_stats_d
86 {
87 long num_merged_labels;
88 };
89
90 static struct cfg_stats_d cfg_stats;
91
92 /* Nonzero if we found a computed goto while building basic blocks. */
93 static bool found_computed_goto;
94
95 /* Basic blocks and flowgraphs. */
96 static basic_block create_bb (void *, void *, basic_block);
97 static void make_blocks (tree);
98 static void factor_computed_gotos (void);
99
100 /* Edges. */
101 static void make_edges (void);
102 static void make_ctrl_stmt_edges (basic_block);
103 static void make_exit_edges (basic_block);
104 static void make_cond_expr_edges (basic_block);
105 static void make_switch_expr_edges (basic_block);
106 static void make_goto_expr_edges (basic_block);
107 static edge tree_redirect_edge_and_branch (edge, basic_block);
108 static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
109 static void split_critical_edges (void);
110
111 /* Various helpers. */
112 static inline bool stmt_starts_bb_p (tree, tree);
113 static int tree_verify_flow_info (void);
114 static void tree_make_forwarder_block (edge);
115 static void tree_cfg2vcg (FILE *);
116
117 /* Flowgraph optimization and cleanup. */
118 static void tree_merge_blocks (basic_block, basic_block);
119 static bool tree_can_merge_blocks_p (basic_block, basic_block);
120 static void remove_bb (basic_block);
121 static edge find_taken_edge_computed_goto (basic_block, tree);
122 static edge find_taken_edge_cond_expr (basic_block, tree);
123 static edge find_taken_edge_switch_expr (basic_block, tree);
124 static tree find_case_label_for_value (tree, tree);
125
126 void
127 init_empty_tree_cfg (void)
128 {
129 /* Initialize the basic block array. */
130 init_flow ();
131 profile_status = PROFILE_ABSENT;
132 n_basic_blocks = 0;
133 last_basic_block = 0;
134 VARRAY_BB_INIT (basic_block_info, initial_cfg_capacity, "basic_block_info");
135
136 /* Build a mapping of labels to their associated blocks. */
137 VARRAY_BB_INIT (label_to_block_map, initial_cfg_capacity,
138 "label to block map");
139
140 ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
141 EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;
142 }
143
144 /*---------------------------------------------------------------------------
145 Create basic blocks
146 ---------------------------------------------------------------------------*/
147
148 /* Entry point to the CFG builder for trees. TP points to the list of
149 statements to be added to the flowgraph. */
150
151 static void
152 build_tree_cfg (tree *tp)
153 {
154 /* Register specific tree functions. */
155 tree_register_cfg_hooks ();
156
157 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
158
159 init_empty_tree_cfg ();
160
161 found_computed_goto = 0;
162 make_blocks (*tp);
163
164 /* Computed gotos are hell to deal with, especially if there are
165 lots of them with a large number of destinations. So we factor
166 them to a common computed goto location before we build the
167 edge list. After we convert back to normal form, we will un-factor
168 the computed gotos since factoring introduces an unwanted jump. */
169 if (found_computed_goto)
170 factor_computed_gotos ();
171
172 /* Make sure there is always at least one block, even if it's empty. */
173 if (n_basic_blocks == 0)
174 create_empty_bb (ENTRY_BLOCK_PTR);
175
176 /* Adjust the size of the array. */
177 VARRAY_GROW (basic_block_info, n_basic_blocks);
178
179 /* To speed up statement iterator walks, we first purge dead labels. */
180 cleanup_dead_labels ();
181
182 /* Group case nodes to reduce the number of edges.
183 We do this after cleaning up dead labels because otherwise we miss
184 a lot of obvious case merging opportunities. */
185 group_case_labels ();
186
187 /* Create the edges of the flowgraph. */
188 make_edges ();
189
190 /* Debugging dumps. */
191
192 /* Write the flowgraph to a VCG file. */
193 {
194 int local_dump_flags;
195 FILE *dump_file = dump_begin (TDI_vcg, &local_dump_flags);
196 if (dump_file)
197 {
198 tree_cfg2vcg (dump_file);
199 dump_end (TDI_vcg, dump_file);
200 }
201 }
202
203 #ifdef ENABLE_CHECKING
204 verify_stmts ();
205 #endif
206
207 /* Dump a textual representation of the flowgraph. */
208 if (dump_file)
209 dump_tree_cfg (dump_file, dump_flags);
210 }
211
212 static void
213 execute_build_cfg (void)
214 {
215 build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
216 }
217
218 struct tree_opt_pass pass_build_cfg =
219 {
220 "cfg", /* name */
221 NULL, /* gate */
222 execute_build_cfg, /* execute */
223 NULL, /* sub */
224 NULL, /* next */
225 0, /* static_pass_number */
226 TV_TREE_CFG, /* tv_id */
227 PROP_gimple_leh, /* properties_required */
228 PROP_cfg, /* properties_provided */
229 0, /* properties_destroyed */
230 0, /* todo_flags_start */
231 TODO_verify_stmts, /* todo_flags_finish */
232 0 /* letter */
233 };
234
235 /* Search the CFG for any computed gotos. If found, factor them to a
236 common computed goto site. Also record the location of that site so
237 that we can un-factor the gotos after we have converted back to
238 normal form. */
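/* Illustrative sketch of the transformation (hypothetical labels, not
   taken from the original source):

     before factoring:            after factoring:
       goto *p;                     gotovar = p; goto <factored>;
       ...                          ...
       goto *q;                     gotovar = q; goto <factored>;
                                    <factored>: goto *gotovar;

   so only the factored block needs the full fan-out of edges to the
   possible destinations.  */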
239
240 static void
241 factor_computed_gotos (void)
242 {
243 basic_block bb;
244 tree factored_label_decl = NULL;
245 tree var = NULL;
246 tree factored_computed_goto_label = NULL;
247 tree factored_computed_goto = NULL;
248
249 /* We know there are one or more computed gotos in this function.
250 Examine the last statement in each basic block to see if the block
251 ends with a computed goto. */
252
253 FOR_EACH_BB (bb)
254 {
255 block_stmt_iterator bsi = bsi_last (bb);
256 tree last;
257
258 if (bsi_end_p (bsi))
259 continue;
260 last = bsi_stmt (bsi);
261
262 /* Ignore the computed goto we create when we factor the original
263 computed gotos. */
264 if (last == factored_computed_goto)
265 continue;
266
267 /* If the last statement is a computed goto, factor it. */
268 if (computed_goto_p (last))
269 {
270 tree assignment;
271
272 /* The first time we find a computed goto we need to create
273 the factored goto block and the variable each original
274 computed goto will use for their goto destination. */
275 if (! factored_computed_goto)
276 {
277 basic_block new_bb = create_empty_bb (bb);
278 block_stmt_iterator new_bsi = bsi_start (new_bb);
279
280 /* Create the destination of the factored goto. Each original
281 computed goto will put its desired destination into this
282 variable and jump to the label we create immediately
283 below. */
284 var = create_tmp_var (ptr_type_node, "gotovar");
285
286 /* Build a label for the new block which will contain the
287 factored computed goto. */
288 factored_label_decl = create_artificial_label ();
289 factored_computed_goto_label
290 = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
291 bsi_insert_after (&new_bsi, factored_computed_goto_label,
292 BSI_NEW_STMT);
293
294 /* Build our new computed goto. */
295 factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
296 bsi_insert_after (&new_bsi, factored_computed_goto,
297 BSI_NEW_STMT);
298 }
299
300 /* Copy the original computed goto's destination into VAR. */
301 assignment = build (MODIFY_EXPR, ptr_type_node,
302 var, GOTO_DESTINATION (last));
303 bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
304
305 /* And re-vector the computed goto to the new destination. */
306 GOTO_DESTINATION (last) = factored_label_decl;
307 }
308 }
309 }
310
311
312 /* Build a flowgraph for the statement_list STMT_LIST. */
313
314 static void
315 make_blocks (tree stmt_list)
316 {
317 tree_stmt_iterator i = tsi_start (stmt_list);
318 tree stmt = NULL;
319 bool start_new_block = true;
320 bool first_stmt_of_list = true;
321 basic_block bb = ENTRY_BLOCK_PTR;
322
323 while (!tsi_end_p (i))
324 {
325 tree prev_stmt;
326
327 prev_stmt = stmt;
328 stmt = tsi_stmt (i);
329
330 /* If the statement starts a new basic block or if we have determined
331 in a previous pass that we need to create a new block for STMT, do
332 so now. */
333 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
334 {
335 if (!first_stmt_of_list)
336 stmt_list = tsi_split_statement_list_before (&i);
337 bb = create_basic_block (stmt_list, NULL, bb);
338 start_new_block = false;
339 }
340
341 /* Now add STMT to BB and create the subgraphs for special statement
342 codes. */
343 set_bb_for_stmt (stmt, bb);
344
345 if (computed_goto_p (stmt))
346 found_computed_goto = true;
347
348 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
349 next iteration. */
350 if (stmt_ends_bb_p (stmt))
351 start_new_block = true;
352
353 tsi_next (&i);
354 first_stmt_of_list = false;
355 }
356 }
357
358
359 /* Create and return a new empty basic block after bb AFTER. */
360
361 static basic_block
362 create_bb (void *h, void *e, basic_block after)
363 {
364 basic_block bb;
365
366 gcc_assert (!e);
367
368 /* Create and initialize a new basic block. Since alloc_block uses
369 ggc_alloc_cleared to allocate a basic block, we do not have to
370 clear the newly allocated basic block here. */
371 bb = alloc_block ();
372
373 bb->index = last_basic_block;
374 bb->flags = BB_NEW;
375 bb->stmt_list = h ? h : alloc_stmt_list ();
376
377 /* Add the new block to the linked list of blocks. */
378 link_block (bb, after);
379
380 /* Grow the basic block array if needed. */
381 if ((size_t) last_basic_block == VARRAY_SIZE (basic_block_info))
382 {
383 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
384 VARRAY_GROW (basic_block_info, new_size);
385 }
386
387 /* Add the newly created block to the array. */
388 BASIC_BLOCK (last_basic_block) = bb;
389
390 n_basic_blocks++;
391 last_basic_block++;
392
393 return bb;
394 }
395
396
397 /*---------------------------------------------------------------------------
398 Edge creation
399 ---------------------------------------------------------------------------*/
400
401 /* Fold COND_EXPR_COND of each COND_EXPR. */
402
403 void
404 fold_cond_expr_cond (void)
405 {
406 basic_block bb;
407
408 FOR_EACH_BB (bb)
409 {
410 tree stmt = last_stmt (bb);
411
412 if (stmt
413 && TREE_CODE (stmt) == COND_EXPR)
414 {
415 tree cond = fold (COND_EXPR_COND (stmt));
416 if (integer_zerop (cond))
417 COND_EXPR_COND (stmt) = boolean_false_node;
418 else if (integer_onep (cond))
419 COND_EXPR_COND (stmt) = boolean_true_node;
420 }
421 }
422 }
423
424 /* Join all the blocks in the flowgraph. */
425
426 static void
427 make_edges (void)
428 {
429 basic_block bb;
430
431 /* Create an edge from entry to the first block with executable
432 statements in it. */
433 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
434
435 /* Traverse the basic block array placing edges. */
436 FOR_EACH_BB (bb)
437 {
438 tree first = first_stmt (bb);
439 tree last = last_stmt (bb);
440
441 if (first)
442 {
443 /* Edges for statements that always alter flow control. */
444 if (is_ctrl_stmt (last))
445 make_ctrl_stmt_edges (bb);
446
447 /* Edges for statements that sometimes alter flow control. */
448 if (is_ctrl_altering_stmt (last))
449 make_exit_edges (bb);
450 }
451
452 /* Finally, if no edges were created above, this is a regular
453 basic block that only needs a fallthru edge. */
454 if (EDGE_COUNT (bb->succs) == 0)
455 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
456 }
457
458 /* We do not care about fake edges, so remove any that the CFG
459 builder inserted for completeness. */
460 remove_fake_exit_edges ();
461
462 /* Fold COND_EXPR_COND of each COND_EXPR. */
463 fold_cond_expr_cond ();
464
465 /* Clean up the graph and warn for unreachable code. */
466 cleanup_tree_cfg ();
467 }
468
469
470 /* Create edges for control statement at basic block BB. */
471
472 static void
473 make_ctrl_stmt_edges (basic_block bb)
474 {
475 tree last = last_stmt (bb);
476
477 gcc_assert (last);
478 switch (TREE_CODE (last))
479 {
480 case GOTO_EXPR:
481 make_goto_expr_edges (bb);
482 break;
483
484 case RETURN_EXPR:
485 make_edge (bb, EXIT_BLOCK_PTR, 0);
486 break;
487
488 case COND_EXPR:
489 make_cond_expr_edges (bb);
490 break;
491
492 case SWITCH_EXPR:
493 make_switch_expr_edges (bb);
494 break;
495
496 case RESX_EXPR:
497 make_eh_edges (last);
498 /* Yet another NORETURN hack. */
499 if (EDGE_COUNT (bb->succs) == 0)
500 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
501 break;
502
503 default:
504 gcc_unreachable ();
505 }
506 }
507
508
509 /* Create exit edges for statements in block BB that alter the flow of
510 control. Statements that alter the control flow are 'goto', 'return'
511 and calls to non-returning functions. */
512
513 static void
514 make_exit_edges (basic_block bb)
515 {
516 tree last = last_stmt (bb), op;
517
518 gcc_assert (last);
519 switch (TREE_CODE (last))
520 {
521 case RESX_EXPR:
522 break;
523 case CALL_EXPR:
524 /* If this function receives a nonlocal goto, then we need to
525 make edges from this call site to all the nonlocal goto
526 handlers. */
527 if (TREE_SIDE_EFFECTS (last)
528 && current_function_has_nonlocal_label)
529 make_goto_expr_edges (bb);
530
531 /* If this statement has reachable exception handlers, then
532 create abnormal edges to them. */
533 make_eh_edges (last);
534
535 /* Some calls are known not to return. For such calls we create
536 a fake edge.
537
538 We really need to revamp how we build edges so that it's not
539 such a bloody pain to avoid creating edges for this case since
540 all we do is remove these edges when we're done building the
541 CFG. */
542 if (call_expr_flags (last) & ECF_NORETURN)
543 {
544 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
545 return;
546 }
547
548 /* Don't forget the fall-thru edge. */
549 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
550 break;
551
552 case MODIFY_EXPR:
553 /* A MODIFY_EXPR may have a CALL_EXPR on its RHS and the CALL_EXPR
554 may have an abnormal edge. Search the RHS for this case and
555 create any required edges. */
556 op = get_call_expr_in (last);
557 if (op && TREE_SIDE_EFFECTS (op)
558 && current_function_has_nonlocal_label)
559 make_goto_expr_edges (bb);
560
561 make_eh_edges (last);
562 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
563 break;
564
565 default:
566 gcc_unreachable ();
567 }
568 }
569
570
571 /* Create the edges for a COND_EXPR starting at block BB.
572 At this point, both clauses must contain only simple gotos. */
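/* For example (illustrative, not from the original source), a lowered

     if (a) goto <L1>; else goto <L2>;

   yields an EDGE_TRUE_VALUE edge to the block starting with <L1> and an
   EDGE_FALSE_VALUE edge to the block starting with <L2>.  */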
573
574 static void
575 make_cond_expr_edges (basic_block bb)
576 {
577 tree entry = last_stmt (bb);
578 basic_block then_bb, else_bb;
579 tree then_label, else_label;
580 edge e;
581
582 gcc_assert (entry);
583 gcc_assert (TREE_CODE (entry) == COND_EXPR);
584
585 /* Entry basic blocks for each component. */
586 then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
587 else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
588 then_bb = label_to_block (then_label);
589 else_bb = label_to_block (else_label);
590
591 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
592 #ifdef USE_MAPPED_LOCATION
593 e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
594 #else
595 e->goto_locus = EXPR_LOCUS (COND_EXPR_THEN (entry));
596 #endif
597 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
598 if (e)
599 {
600 #ifdef USE_MAPPED_LOCATION
601 e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
602 #else
603 e->goto_locus = EXPR_LOCUS (COND_EXPR_ELSE (entry));
604 #endif
605 }
606 }
607
608 /* Hashing routine for EDGE_TO_CASES. */
609
610 static hashval_t
611 edge_to_cases_hash (const void *p)
612 {
613 edge e = ((struct edge_to_cases_elt *)p)->e;
614
615 /* Hash on the edge itself (which is a pointer). */
616 return htab_hash_pointer (e);
617 }
618
619 /* Equality routine for EDGE_TO_CASES.  Edges are unique, so testing
620 for equality is just a pointer comparison. */
621
622 static int
623 edge_to_cases_eq (const void *p1, const void *p2)
624 {
625 edge e1 = ((struct edge_to_cases_elt *)p1)->e;
626 edge e2 = ((struct edge_to_cases_elt *)p2)->e;
627
628 return e1 == e2;
629 }
630
631 /* Called for each element in the hash table (P) as we delete the
632 edge to cases hash table.
633
634 Clear all the TREE_CHAINs to prevent problems with copying of
635 SWITCH_EXPRs and structure sharing rules, then free the hash table
636 element. */
637
638 static void
639 edge_to_cases_cleanup (void *p)
640 {
641 struct edge_to_cases_elt *elt = p;
642 tree t, next;
643
644 for (t = elt->case_labels; t; t = next)
645 {
646 next = TREE_CHAIN (t);
647 TREE_CHAIN (t) = NULL;
648 }
649 free (p);
650 }
651
652 /* Start recording information mapping edges to case labels. */
653
654 void
655 start_recording_case_labels (void)
656 {
657 gcc_assert (edge_to_cases == NULL);
658
659 edge_to_cases = htab_create (37,
660 edge_to_cases_hash,
661 edge_to_cases_eq,
662 edge_to_cases_cleanup);
663 }
664
665 /* Return nonzero if we are recording information for case labels. */
666
667 static bool
668 recording_case_labels_p (void)
669 {
670 return (edge_to_cases != NULL);
671 }
672
673 /* Stop recording information mapping edges to case labels and
674 remove any information we have recorded. */
675 void
676 end_recording_case_labels (void)
677 {
678 htab_delete (edge_to_cases);
679 edge_to_cases = NULL;
680 }
681
682 /* Record that CASE_LABEL (a CASE_LABEL_EXPR) references edge E. */
683
684 static void
685 record_switch_edge (edge e, tree case_label)
686 {
687 struct edge_to_cases_elt *elt;
688 void **slot;
689
690 /* Build a hash table element so we can see if E is already
691 in the table. */
692 elt = xmalloc (sizeof (struct edge_to_cases_elt));
693 elt->e = e;
694 elt->case_labels = case_label;
695
696 slot = htab_find_slot (edge_to_cases, elt, INSERT);
697
698 if (*slot == NULL)
699 {
700 /* E was not in the hash table. Install E into the hash table. */
701 *slot = (void *)elt;
702 }
703 else
704 {
705 /* E was already in the hash table. Free ELT as we do not need it
706 anymore. */
707 free (elt);
708
709 /* Get the entry stored in the hash table. */
710 elt = (struct edge_to_cases_elt *) *slot;
711
712 /* Add it to the chain of CASE_LABEL_EXPRs referencing E. */
713 TREE_CHAIN (case_label) = elt->case_labels;
714 elt->case_labels = case_label;
715 }
716 }
717
718 /* If we are inside a {start,end}_recording_cases block, then return
719 a chain of CASE_LABEL_EXPRs from T which reference E.
720
721 Otherwise return NULL. */
722
723 static tree
724 get_cases_for_edge (edge e, tree t)
725 {
726 struct edge_to_cases_elt elt, *elt_p;
727 void **slot;
728 size_t i, n;
729 tree vec;
730
731 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
732 chains available. Return NULL so the caller can detect this case. */
733 if (!recording_case_labels_p ())
734 return NULL;
735
736 restart:
737 elt.e = e;
738 elt.case_labels = NULL;
739 slot = htab_find_slot (edge_to_cases, &elt, NO_INSERT);
740
741 if (slot)
742 {
743 elt_p = (struct edge_to_cases_elt *)*slot;
744 return elt_p->case_labels;
745 }
746
747 /* If we did not find E in the hash table, then this must be the first
748 time we have been queried for information about E & T. Add all the
749 elements from T to the hash table then perform the query again. */
750
751 vec = SWITCH_LABELS (t);
752 n = TREE_VEC_LENGTH (vec);
753 for (i = 0; i < n; i++)
754 {
755 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
756 basic_block label_bb = label_to_block (lab);
757 record_switch_edge (find_edge (e->src, label_bb), TREE_VEC_ELT (vec, i));
758 }
759 goto restart;
760 }
761
762 /* Create the edges for a SWITCH_EXPR starting at block BB.
763 At this point, the switch body has been lowered and the
764 SWITCH_LABELS filled in, so this is in effect a multi-way branch. */
765
766 static void
767 make_switch_expr_edges (basic_block bb)
768 {
769 tree entry = last_stmt (bb);
770 size_t i, n;
771 tree vec;
772
773 vec = SWITCH_LABELS (entry);
774 n = TREE_VEC_LENGTH (vec);
775
776 for (i = 0; i < n; ++i)
777 {
778 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
779 basic_block label_bb = label_to_block (lab);
780 make_edge (bb, label_bb, 0);
781 }
782 }
783
784
785 /* Return the basic block holding label DEST. */
786
787 basic_block
788 label_to_block_fn (struct function *ifun, tree dest)
789 {
790 int uid = LABEL_DECL_UID (dest);
791
792 /* We would die hard when faced with an undefined label. Emit a label to
793 the very first basic block. This will hopefully make even the dataflow
794 and undefined-variable warnings come out right. */
795 if ((errorcount || sorrycount) && uid < 0)
796 {
797 block_stmt_iterator bsi = bsi_start (BASIC_BLOCK (0));
798 tree stmt;
799
800 stmt = build1 (LABEL_EXPR, void_type_node, dest);
801 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
802 uid = LABEL_DECL_UID (dest);
803 }
804 if (VARRAY_SIZE (ifun->cfg->x_label_to_block_map) <= (unsigned int)uid)
805 return NULL;
806 return VARRAY_BB (ifun->cfg->x_label_to_block_map, uid);
807 }
808
809 /* Create edges for a goto statement at block BB. */
810
811 static void
812 make_goto_expr_edges (basic_block bb)
813 {
814 tree goto_t;
815 basic_block target_bb;
816 int for_call;
817 block_stmt_iterator last = bsi_last (bb);
818
819 goto_t = bsi_stmt (last);
820
821 /* If the last statement is not a GOTO (i.e., it is a RETURN_EXPR,
822 CALL_EXPR or MODIFY_EXPR), then the edge is an abnormal edge resulting
823 from a nonlocal goto. */
824 if (TREE_CODE (goto_t) != GOTO_EXPR)
825 for_call = 1;
826 else
827 {
828 tree dest = GOTO_DESTINATION (goto_t);
829 for_call = 0;
830
831 /* A GOTO to a local label creates normal edges. */
832 if (simple_goto_p (goto_t))
833 {
834 edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
835 #ifdef USE_MAPPED_LOCATION
836 e->goto_locus = EXPR_LOCATION (goto_t);
837 #else
838 e->goto_locus = EXPR_LOCUS (goto_t);
839 #endif
840 bsi_remove (&last);
841 return;
842 }
843
844 /* Nothing more to do for nonlocal gotos. */
845 if (TREE_CODE (dest) == LABEL_DECL)
846 return;
847
848 /* Computed gotos remain. */
849 }
850
851 /* Look for the block starting with the destination label. In the
852 case of a computed goto, make an edge to any label block we find
853 in the CFG. */
854 FOR_EACH_BB (target_bb)
855 {
856 block_stmt_iterator bsi;
857
858 for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
859 {
860 tree target = bsi_stmt (bsi);
861
862 if (TREE_CODE (target) != LABEL_EXPR)
863 break;
864
865 if (
866 /* Computed GOTOs. Make an edge to every label block that has
867 been marked as a potential target for a computed goto. */
868 (FORCED_LABEL (LABEL_EXPR_LABEL (target)) && for_call == 0)
869 /* Nonlocal GOTO target. Make an edge to every label block
870 that has been marked as a potential target for a nonlocal
871 goto. */
872 || (DECL_NONLOCAL (LABEL_EXPR_LABEL (target)) && for_call == 1))
873 {
874 make_edge (bb, target_bb, EDGE_ABNORMAL);
875 break;
876 }
877 }
878 }
879
880 /* Degenerate case of computed goto with no labels. */
881 if (!for_call && EDGE_COUNT (bb->succs) == 0)
882 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
883 }
884
885
886 /*---------------------------------------------------------------------------
887 Flowgraph analysis
888 ---------------------------------------------------------------------------*/
889
890 /* Cleanup useless labels in basic blocks. This is something we wish
891 to do early because it allows us to group case labels before creating
892 the edges for the CFG, and it speeds up block statement iterators in
893 all passes later on.
894 We only run this pass once; running it more than once is probably not
895 profitable. */
896
897 /* A map from basic block index to the leading label of that block. */
898 static tree *label_for_bb;
899
900 /* Callback for for_each_eh_region. Helper for cleanup_dead_labels. */
901 static void
902 update_eh_label (struct eh_region *region)
903 {
904 tree old_label = get_eh_region_tree_label (region);
905 if (old_label)
906 {
907 tree new_label;
908 basic_block bb = label_to_block (old_label);
909
910 /* ??? After optimizing, there may be EH regions with labels
911 that have already been removed from the function body, so
912 there is no basic block for them. */
913 if (! bb)
914 return;
915
916 new_label = label_for_bb[bb->index];
917 set_eh_region_tree_label (region, new_label);
918 }
919 }
920
921 /* Given LABEL return the first label in the same basic block. */
922 static tree
923 main_block_label (tree label)
924 {
925 basic_block bb = label_to_block (label);
926
927 /* label_to_block may have inserted an undefined label into the chain. */
928 if (!label_for_bb[bb->index])
929 label_for_bb[bb->index] = label;
930 return label_for_bb[bb->index];
931 }
932
933 /* Cleanup redundant labels. This is a three-step process:
934 1) Find the leading label for each block.
935 2) Redirect all references to labels to the leading labels.
936 3) Cleanup all useless labels. */
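/* Illustrative example (hypothetical label names): if a block starts
   with a user label L1 followed by an artificial label <D1234>, step 1
   picks L1 as the leading label, step 2 redirects gotos and case labels
   that target <D1234> to L1, and step 3 deletes <D1234>.  */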
937
938 void
939 cleanup_dead_labels (void)
940 {
941 basic_block bb;
942 label_for_bb = xcalloc (last_basic_block, sizeof (tree));
943
944 /* Find a suitable label for each block. We use the first user-defined
945 label if there is one, or otherwise just the first label we see. */
946 FOR_EACH_BB (bb)
947 {
948 block_stmt_iterator i;
949
950 for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
951 {
952 tree label, stmt = bsi_stmt (i);
953
954 if (TREE_CODE (stmt) != LABEL_EXPR)
955 break;
956
957 label = LABEL_EXPR_LABEL (stmt);
958
959 /* If we have not yet seen a label for the current block,
960 remember this one and see if there are more labels. */
961 if (! label_for_bb[bb->index])
962 {
963 label_for_bb[bb->index] = label;
964 continue;
965 }
966
967 /* If we did see a label for the current block already, but it
968 is an artificially created label, replace it if the current
969 label is a user defined label. */
970 if (! DECL_ARTIFICIAL (label)
971 && DECL_ARTIFICIAL (label_for_bb[bb->index]))
972 {
973 label_for_bb[bb->index] = label;
974 break;
975 }
976 }
977 }
978
979 /* Now redirect all jumps/branches to the selected label.
980 First do so for each block ending in a control statement. */
981 FOR_EACH_BB (bb)
982 {
983 tree stmt = last_stmt (bb);
984 if (!stmt)
985 continue;
986
987 switch (TREE_CODE (stmt))
988 {
989 case COND_EXPR:
990 {
991 tree true_branch, false_branch;
992
993 true_branch = COND_EXPR_THEN (stmt);
994 false_branch = COND_EXPR_ELSE (stmt);
995
996 GOTO_DESTINATION (true_branch)
997 = main_block_label (GOTO_DESTINATION (true_branch));
998 GOTO_DESTINATION (false_branch)
999 = main_block_label (GOTO_DESTINATION (false_branch));
1000
1001 break;
1002 }
1003
1004 case SWITCH_EXPR:
1005 {
1006 size_t i;
1007 tree vec = SWITCH_LABELS (stmt);
1008 size_t n = TREE_VEC_LENGTH (vec);
1009
1010 /* Replace all destination labels. */
1011 for (i = 0; i < n; ++i)
1012 {
1013 tree elt = TREE_VEC_ELT (vec, i);
1014 tree label = main_block_label (CASE_LABEL (elt));
1015 CASE_LABEL (elt) = label;
1016 }
1017 break;
1018 }
1019
1020 /* We have to handle GOTO_EXPRs until they're removed, and we don't
1021 remove them until after we've created the CFG edges. */
1022 case GOTO_EXPR:
1023 if (! computed_goto_p (stmt))
1024 {
1025 GOTO_DESTINATION (stmt)
1026 = main_block_label (GOTO_DESTINATION (stmt));
1027 break;
1028 }
1029
1030 default:
1031 break;
1032 }
1033 }
1034
1035 for_each_eh_region (update_eh_label);
1036
1037 /* Finally, purge dead labels. All user-defined labels and labels that
1038 can be the target of non-local gotos are preserved. */
1039 FOR_EACH_BB (bb)
1040 {
1041 block_stmt_iterator i;
1042 tree label_for_this_bb = label_for_bb[bb->index];
1043
1044 if (! label_for_this_bb)
1045 continue;
1046
1047 for (i = bsi_start (bb); !bsi_end_p (i); )
1048 {
1049 tree label, stmt = bsi_stmt (i);
1050
1051 if (TREE_CODE (stmt) != LABEL_EXPR)
1052 break;
1053
1054 label = LABEL_EXPR_LABEL (stmt);
1055
1056 if (label == label_for_this_bb
1057 || ! DECL_ARTIFICIAL (label)
1058 || DECL_NONLOCAL (label))
1059 bsi_next (&i);
1060 else
1061 bsi_remove (&i);
1062 }
1063 }
1064
1065 free (label_for_bb);
1066 }
1067
1068 /* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
1069 and scan the sorted vector of cases. Combine the ones jumping to the
1070 same label.
1071 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
1072
1073 void
1074 group_case_labels (void)
1075 {
1076 basic_block bb;
1077
1078 FOR_EACH_BB (bb)
1079 {
1080 tree stmt = last_stmt (bb);
1081 if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
1082 {
1083 tree labels = SWITCH_LABELS (stmt);
1084 int old_size = TREE_VEC_LENGTH (labels);
1085 int i, j, new_size = old_size;
1086 tree default_case = TREE_VEC_ELT (labels, old_size - 1);
1087 tree default_label;
1088
1089 /* The default label is always the last case in a switch
1090 statement after gimplification. */
1091 default_label = CASE_LABEL (default_case);
1092
1093 /* Look for possible opportunities to merge cases.
1094 Ignore the last element of the label vector because it
1095 must be the default case. */
1096 i = 0;
1097 while (i < old_size - 1)
1098 {
1099 tree base_case, base_label, base_high;
1100 base_case = TREE_VEC_ELT (labels, i);
1101
1102 gcc_assert (base_case);
1103 base_label = CASE_LABEL (base_case);
1104
1105 /* Discard cases that have the same destination as the
1106 default case. */
1107 if (base_label == default_label)
1108 {
1109 TREE_VEC_ELT (labels, i) = NULL_TREE;
1110 i++;
1111 new_size--;
1112 continue;
1113 }
1114
1115 base_high = CASE_HIGH (base_case) ?
1116 CASE_HIGH (base_case) : CASE_LOW (base_case);
1117 i++;
1118 /* Try to merge case labels. Break out when we reach the end
1119 of the label vector or when we cannot merge the next case
1120 label with the current one. */
1121 while (i < old_size - 1)
1122 {
1123 tree merge_case = TREE_VEC_ELT (labels, i);
1124 tree merge_label = CASE_LABEL (merge_case);
1125 tree t = int_const_binop (PLUS_EXPR, base_high,
1126 integer_one_node, 1);
1127
1128 /* Merge the cases if they jump to the same place,
1129 and their ranges are consecutive. */
1130 if (merge_label == base_label
1131 && tree_int_cst_equal (CASE_LOW (merge_case), t))
1132 {
1133 base_high = CASE_HIGH (merge_case) ?
1134 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1135 CASE_HIGH (base_case) = base_high;
1136 TREE_VEC_ELT (labels, i) = NULL_TREE;
1137 new_size--;
1138 i++;
1139 }
1140 else
1141 break;
1142 }
1143 }
1144
1145 /* Compress the case labels in the label vector, and adjust the
1146 length of the vector. */
1147 for (i = 0, j = 0; i < new_size; i++)
1148 {
1149 while (! TREE_VEC_ELT (labels, j))
1150 j++;
1151 TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
1152 }
1153 TREE_VEC_LENGTH (labels) = new_size;
1154 }
1155 }
1156 }
1157
1158 /* Checks whether we can merge block B into block A. */
1159
1160 static bool
1161 tree_can_merge_blocks_p (basic_block a, basic_block b)
1162 {
1163 tree stmt;
1164 block_stmt_iterator bsi;
1165 tree phi;
1166
1167 if (!single_succ_p (a))
1168 return false;
1169
1170 if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
1171 return false;
1172
1173 if (single_succ (a) != b)
1174 return false;
1175
1176 if (!single_pred_p (b))
1177 return false;
1178
1179 if (b == EXIT_BLOCK_PTR)
1180 return false;
1181
1182 /* If A ends by a statement causing exceptions or something similar, we
1183 cannot merge the blocks. */
1184 stmt = last_stmt (a);
1185 if (stmt && stmt_ends_bb_p (stmt))
1186 return false;
1187
1188 /* Do not allow a block with only a non-local label to be merged. */
1189 if (stmt && TREE_CODE (stmt) == LABEL_EXPR
1190 && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
1191 return false;
1192
1193 /* It must be possible to eliminate all phi nodes in B. If ssa form
1194 is not up-to-date, we cannot eliminate any phis. */
1195 phi = phi_nodes (b);
1196 if (phi)
1197 {
1198 if (need_ssa_update_p ())
1199 return false;
1200
1201 for (; phi; phi = PHI_CHAIN (phi))
1202 if (!is_gimple_reg (PHI_RESULT (phi))
1203 && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
1204 return false;
1205 }
1206
1207 /* Do not remove user labels. */
1208 for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
1209 {
1210 stmt = bsi_stmt (bsi);
1211 if (TREE_CODE (stmt) != LABEL_EXPR)
1212 break;
1213 if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
1214 return false;
1215 }
1216
1217 /* Protect the loop latches. */
1218 if (current_loops
1219 && b->loop_father->latch == b)
1220 return false;
1221
1222 return true;
1223 }
1224
1225 /* Replaces all uses of NAME by VAL. */
1226
1227 void
1228 replace_uses_by (tree name, tree val)
1229 {
1230 imm_use_iterator imm_iter;
1231 use_operand_p use;
1232 tree stmt;
1233 edge e;
1234 unsigned i;
1235 VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
1236
1237 FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
1238 {
1239 stmt = USE_STMT (use);
1240 replace_exp (use, val);
1241
1242 if (TREE_CODE (stmt) == PHI_NODE)
1243 {
1244 e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
1245 if (e->flags & EDGE_ABNORMAL)
1246 {
1247 /* This can only occur for virtual operands, since
1248 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
1249 would prevent replacement. */
1250 gcc_assert (!is_gimple_reg (name));
1251 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1252 }
1253 }
1254 else
1255 VEC_safe_push (tree, heap, stmts, stmt);
1256 }
1257
1258 /* We do not update the statements in the loop above. Consider
1259 x = w * w;
1260
1261 If we performed the update in the first loop, the statement
1262 would be rescanned after first occurrence of w is replaced,
1263 the new uses would be placed to the beginning of the list,
1264 and we would never process them. */
1265 for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
1266 {
1267 tree rhs;
1268
1269 fold_stmt_inplace (stmt);
1270
1271 rhs = get_rhs (stmt);
1272 if (TREE_CODE (rhs) == ADDR_EXPR)
1273 recompute_tree_invarant_for_addr_expr (rhs);
1274
1275 /* If the statement could throw and now cannot, we need to prune cfg. */
1276 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
1277 tree_purge_dead_eh_edges (bb_for_stmt (stmt));
1278
1279 mark_new_vars_to_rename (stmt);
1280 }
1281
1282 VEC_free (tree, heap, stmts);
1283
1284 /* Also update the trees stored in loop structures. */
1285 if (current_loops)
1286 {
1287 struct loop *loop;
1288
1289 for (i = 0; i < current_loops->num; i++)
1290 {
1291 loop = current_loops->parray[i];
1292 if (loop)
1293 substitute_in_loop_info (loop, name, val);
1294 }
1295 }
1296 }
1297
1298 /* Merge block B into block A. */
1299
1300 static void
1301 tree_merge_blocks (basic_block a, basic_block b)
1302 {
1303 block_stmt_iterator bsi;
1304 tree_stmt_iterator last;
1305 tree phi;
1306
1307 if (dump_file)
1308 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1309
1310 /* Remove all single-valued PHI nodes from block B of the form
1311 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1312 bsi = bsi_last (a);
1313 for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
1314 {
1315 tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
1316 tree copy;
1317 bool may_replace_uses = may_propagate_copy (def, use);
1318
1319 /* In case we have loops to care about, do not propagate arguments of
1320 loop closed ssa phi nodes. */
1321 if (current_loops
1322 && is_gimple_reg (def)
1323 && TREE_CODE (use) == SSA_NAME
1324 && a->loop_father != b->loop_father)
1325 may_replace_uses = false;
1326
1327 if (!may_replace_uses)
1328 {
1329 gcc_assert (is_gimple_reg (def));
1330
1331 /* Note that just emitting the copies is fine -- there is no problem
1332 with ordering of phi nodes. This is because A is the single
1333 predecessor of B, therefore results of the phi nodes cannot
1334 appear as arguments of the phi nodes. */
1335 copy = build2 (MODIFY_EXPR, void_type_node, def, use);
1336 bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
1337 SET_PHI_RESULT (phi, NULL_TREE);
1338 SSA_NAME_DEF_STMT (def) = copy;
1339 }
1340 else
1341 replace_uses_by (def, use);
1342
1343 remove_phi_node (phi, NULL);
1344 }
1345
1346 /* Ensure that B follows A. */
1347 move_block_after (b, a);
1348
1349 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1350 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1351
1352 /* Remove labels from B and set bb_for_stmt to A for other statements. */
1353 for (bsi = bsi_start (b); !bsi_end_p (bsi);)
1354 {
1355 if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
1356 {
1357 tree label = bsi_stmt (bsi);
1358
1359 bsi_remove (&bsi);
1360 /* Now that we can thread computed gotos, we might have
1361 a situation where we have a forced label in block B.
1362 However, the label at the start of block B might still be
1363 used in other ways (think about the runtime checking for
1364 Fortran assigned gotos). So we cannot just delete the
1365 label. Instead we move the label to the start of block A. */
1366 if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
1367 {
1368 block_stmt_iterator dest_bsi = bsi_start (a);
1369 bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
1370 }
1371 }
1372 else
1373 {
1374 set_bb_for_stmt (bsi_stmt (bsi), a);
1375 bsi_next (&bsi);
1376 }
1377 }
1378
1379 /* Merge the chains. */
1380 last = tsi_last (a->stmt_list);
1381 tsi_link_after (&last, b->stmt_list, TSI_NEW_STMT);
1382 b->stmt_list = NULL;
1383 }
1384
1385
1386 /* Walk the function tree removing unnecessary statements.
1387
1388 * Empty statement nodes are removed
1389
1390 * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed
1391
1392 * Unnecessary COND_EXPRs are removed
1393
1394 * Some unnecessary BIND_EXPRs are removed
1395
1396 Clearly more work could be done. The trick is doing the analysis
1397 and removal fast enough to be a net improvement in compile times.
1398
1399 Note that when we remove a control structure such as a COND_EXPR
1400 BIND_EXPR, or TRY block, we will need to repeat this optimization pass
1401 to ensure we eliminate all the useless code. */
1402
1403 struct rus_data
1404 {
1405 tree *last_goto;
1406 bool repeat;
1407 bool may_throw;
1408 bool may_branch;
1409 bool has_label;
1410 };
1411
1412 static void remove_useless_stmts_1 (tree *, struct rus_data *);
1413
1414 static bool
1415 remove_useless_stmts_warn_notreached (tree stmt)
1416 {
1417 if (EXPR_HAS_LOCATION (stmt))
1418 {
1419 location_t loc = EXPR_LOCATION (stmt);
1420 if (LOCATION_LINE (loc) > 0)
1421 {
1422 warning (0, "%Hwill never be executed", &loc);
1423 return true;
1424 }
1425 }
1426
1427 switch (TREE_CODE (stmt))
1428 {
1429 case STATEMENT_LIST:
1430 {
1431 tree_stmt_iterator i;
1432 for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
1433 if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
1434 return true;
1435 }
1436 break;
1437
1438 case COND_EXPR:
1439 if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
1440 return true;
1441 if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
1442 return true;
1443 if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
1444 return true;
1445 break;
1446
1447 case TRY_FINALLY_EXPR:
1448 case TRY_CATCH_EXPR:
1449 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
1450 return true;
1451 if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
1452 return true;
1453 break;
1454
1455 case CATCH_EXPR:
1456 return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
1457 case EH_FILTER_EXPR:
1458 return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
1459 case BIND_EXPR:
1460 return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
1461
1462 default:
1463 /* Not a live container. */
1464 break;
1465 }
1466
1467 return false;
1468 }
1469
1470 static void
1471 remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
1472 {
1473 tree then_clause, else_clause, cond;
1474 bool save_has_label, then_has_label, else_has_label;
1475
1476 save_has_label = data->has_label;
1477 data->has_label = false;
1478 data->last_goto = NULL;
1479
1480 remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
1481
1482 then_has_label = data->has_label;
1483 data->has_label = false;
1484 data->last_goto = NULL;
1485
1486 remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
1487
1488 else_has_label = data->has_label;
1489 data->has_label = save_has_label | then_has_label | else_has_label;
1490
1491 then_clause = COND_EXPR_THEN (*stmt_p);
1492 else_clause = COND_EXPR_ELSE (*stmt_p);
1493 cond = fold (COND_EXPR_COND (*stmt_p));
1494
1495 /* If neither arm does anything at all, we can remove the whole IF. */
1496 if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
1497 {
1498 *stmt_p = build_empty_stmt ();
1499 data->repeat = true;
1500 }
1501
1502 /* If there are no reachable statements in an arm, then we can
1503 zap the entire conditional. */
1504 else if (integer_nonzerop (cond) && !else_has_label)
1505 {
1506 if (warn_notreached)
1507 remove_useless_stmts_warn_notreached (else_clause);
1508 *stmt_p = then_clause;
1509 data->repeat = true;
1510 }
1511 else if (integer_zerop (cond) && !then_has_label)
1512 {
1513 if (warn_notreached)
1514 remove_useless_stmts_warn_notreached (then_clause);
1515 *stmt_p = else_clause;
1516 data->repeat = true;
1517 }
1518
1519 /* Check a couple of simple things on then/else with single stmts. */
1520 else
1521 {
1522 tree then_stmt = expr_only (then_clause);
1523 tree else_stmt = expr_only (else_clause);
1524
1525 /* Notice branches to a common destination. */
1526 if (then_stmt && else_stmt
1527 && TREE_CODE (then_stmt) == GOTO_EXPR
1528 && TREE_CODE (else_stmt) == GOTO_EXPR
1529 && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
1530 {
1531 *stmt_p = then_stmt;
1532 data->repeat = true;
1533 }
1534
1535 /* If the THEN/ELSE clause merely assigns a value to a variable or
1536 parameter which is already known to contain that value, then
1537 remove the useless THEN/ELSE clause. */
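/* E.g. (illustrative): in "if (x) ...; else x = 0;" the else arm is
   dropped, and in "if (x == 1) x = 1; else ...;" the then arm is
   dropped, since either assignment merely re-stores the value already
   implied by the condition.  */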
1538 else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
1539 {
1540 if (else_stmt
1541 && TREE_CODE (else_stmt) == MODIFY_EXPR
1542 && TREE_OPERAND (else_stmt, 0) == cond
1543 && integer_zerop (TREE_OPERAND (else_stmt, 1)))
1544 COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
1545 }
1546 else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
1547 && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
1548 || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
1549 && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
1550 {
1551 tree stmt = (TREE_CODE (cond) == EQ_EXPR
1552 ? then_stmt : else_stmt);
1553 tree *location = (TREE_CODE (cond) == EQ_EXPR
1554 ? &COND_EXPR_THEN (*stmt_p)
1555 : &COND_EXPR_ELSE (*stmt_p));
1556
1557 if (stmt
1558 && TREE_CODE (stmt) == MODIFY_EXPR
1559 && TREE_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
1560 && TREE_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
1561 *location = alloc_stmt_list ();
1562 }
1563 }
1564
1565 /* Protect GOTOs in the arm of COND_EXPRs from being removed. They
1566 would be re-introduced during lowering. */
1567 data->last_goto = NULL;
1568 }
1569
1570
1571 static void
1572 remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
1573 {
1574 bool save_may_branch, save_may_throw;
1575 bool this_may_branch, this_may_throw;
1576
1577 /* Collect may_branch and may_throw information for the body only. */
1578 save_may_branch = data->may_branch;
1579 save_may_throw = data->may_throw;
1580 data->may_branch = false;
1581 data->may_throw = false;
1582 data->last_goto = NULL;
1583
1584 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1585
1586 this_may_branch = data->may_branch;
1587 this_may_throw = data->may_throw;
1588 data->may_branch |= save_may_branch;
1589 data->may_throw |= save_may_throw;
1590 data->last_goto = NULL;
1591
1592 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1593
1594 /* If the body is empty, then we can emit the FINALLY block without
1595 the enclosing TRY_FINALLY_EXPR. */
1596 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
1597 {
1598 *stmt_p = TREE_OPERAND (*stmt_p, 1);
1599 data->repeat = true;
1600 }
1601
1602 /* If the handler is empty, then we can emit the TRY block without
1603 the enclosing TRY_FINALLY_EXPR. */
1604 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1605 {
1606 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1607 data->repeat = true;
1608 }
1609
1610 /* If the body neither throws, nor branches, then we can safely
1611 string the TRY and FINALLY blocks together. */
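/* Illustrative sketch: TRY_FINALLY_EXPR <x = 1, f ()> simply becomes
   the plain sequence "x = 1; f ();" once we know the body cannot leave
   abnormally.  */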
1612 else if (!this_may_branch && !this_may_throw)
1613 {
1614 tree stmt = *stmt_p;
1615 *stmt_p = TREE_OPERAND (stmt, 0);
1616 append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
1617 data->repeat = true;
1618 }
1619 }
1620
1621
1622 static void
1623 remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
1624 {
1625 bool save_may_throw, this_may_throw;
1626 tree_stmt_iterator i;
1627 tree stmt;
1628
1629 /* Collect may_throw information for the body only. */
1630 save_may_throw = data->may_throw;
1631 data->may_throw = false;
1632 data->last_goto = NULL;
1633
1634 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
1635
1636 this_may_throw = data->may_throw;
1637 data->may_throw = save_may_throw;
1638
1639 /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR. */
1640 if (!this_may_throw)
1641 {
1642 if (warn_notreached)
1643 remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
1644 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1645 data->repeat = true;
1646 return;
1647 }
1648
1649 /* Process the catch clause specially. We may be able to tell that
1650 no exceptions propagate past this point. */
1651
1652 this_may_throw = true;
1653 i = tsi_start (TREE_OPERAND (*stmt_p, 1));
1654 stmt = tsi_stmt (i);
1655 data->last_goto = NULL;
1656
1657 switch (TREE_CODE (stmt))
1658 {
1659 case CATCH_EXPR:
1660 for (; !tsi_end_p (i); tsi_next (&i))
1661 {
1662 stmt = tsi_stmt (i);
1663 /* If we catch all exceptions, then the body does not
1664 propagate exceptions past this point. */
1665 if (CATCH_TYPES (stmt) == NULL)
1666 this_may_throw = false;
1667 data->last_goto = NULL;
1668 remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
1669 }
1670 break;
1671
1672 case EH_FILTER_EXPR:
1673 if (EH_FILTER_MUST_NOT_THROW (stmt))
1674 this_may_throw = false;
1675 else if (EH_FILTER_TYPES (stmt) == NULL)
1676 this_may_throw = false;
1677 remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
1678 break;
1679
1680 default:
1681 /* Otherwise this is a cleanup. */
1682 remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
1683
1684 /* If the cleanup is empty, then we can emit the TRY block without
1685 the enclosing TRY_CATCH_EXPR. */
1686 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
1687 {
1688 *stmt_p = TREE_OPERAND (*stmt_p, 0);
1689 data->repeat = true;
1690 }
1691 break;
1692 }
1693 data->may_throw |= this_may_throw;
1694 }
1695
1696
1697 static void
1698 remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
1699 {
1700 tree block;
1701
1702 /* First remove anything underneath the BIND_EXPR. */
1703 remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
1704
1705 /* If the BIND_EXPR has no variables, then we can pull everything
1706 up one level and remove the BIND_EXPR, unless this is the toplevel
1707 BIND_EXPR for the current function or an inlined function.
1708
1709 When this situation occurs we will want to apply this
1710 optimization again. */
1711 block = BIND_EXPR_BLOCK (*stmt_p);
1712 if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
1713 && *stmt_p != DECL_SAVED_TREE (current_function_decl)
1714 && (! block
1715 || ! BLOCK_ABSTRACT_ORIGIN (block)
1716 || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
1717 != FUNCTION_DECL)))
1718 {
1719 *stmt_p = BIND_EXPR_BODY (*stmt_p);
1720 data->repeat = true;
1721 }
1722 }
1723
1724
1725 static void
1726 remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
1727 {
1728 tree dest = GOTO_DESTINATION (*stmt_p);
1729
1730 data->may_branch = true;
1731 data->last_goto = NULL;
1732
1733 /* Record the last goto expr, so that we can delete it if unnecessary. */
1734 if (TREE_CODE (dest) == LABEL_DECL)
1735 data->last_goto = stmt_p;
1736 }
1737
1738
1739 static void
1740 remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
1741 {
1742 tree label = LABEL_EXPR_LABEL (*stmt_p);
1743
1744 data->has_label = true;
1745
1746 /* We do want to jump across non-local label receiver code. */
1747 if (DECL_NONLOCAL (label))
1748 data->last_goto = NULL;
1749
1750 else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
1751 {
1752 *data->last_goto = build_empty_stmt ();
1753 data->repeat = true;
1754 }
1755
1756 /* ??? Add something here to delete unused labels. */
1757 }
1758
1759
1760 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
1761 decl. This allows us to eliminate redundant or useless
1762 calls to "const" functions.
1763
1764 The gimplifier already does the same operation, but we may notice functions
1765 becoming const or pure after their calls have been gimplified, so we need
1766 to update the flags. */
1767
1768 static void
1769 update_call_expr_flags (tree call)
1770 {
1771 tree decl = get_callee_fndecl (call);
1772 if (!decl)
1773 return;
1774 if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
1775 TREE_SIDE_EFFECTS (call) = 0;
1776 if (TREE_NOTHROW (decl))
1777 TREE_NOTHROW (call) = 1;
1778 }
1779
1780
1781 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1782
1783 void
1784 notice_special_calls (tree t)
1785 {
1786 int flags = call_expr_flags (t);
1787
1788 if (flags & ECF_MAY_BE_ALLOCA)
1789 current_function_calls_alloca = true;
1790 if (flags & ECF_RETURNS_TWICE)
1791 current_function_calls_setjmp = true;
1792 }
1793
1794
1795 /* Clear flags set by notice_special_calls. Used by dead code removal
1796 to update the flags. */
1797
1798 void
1799 clear_special_calls (void)
1800 {
1801 current_function_calls_alloca = false;
1802 current_function_calls_setjmp = false;
1803 }
1804
1805
1806 static void
1807 remove_useless_stmts_1 (tree *tp, struct rus_data *data)
1808 {
1809 tree t = *tp, op;
1810
1811 switch (TREE_CODE (t))
1812 {
1813 case COND_EXPR:
1814 remove_useless_stmts_cond (tp, data);
1815 break;
1816
1817 case TRY_FINALLY_EXPR:
1818 remove_useless_stmts_tf (tp, data);
1819 break;
1820
1821 case TRY_CATCH_EXPR:
1822 remove_useless_stmts_tc (tp, data);
1823 break;
1824
1825 case BIND_EXPR:
1826 remove_useless_stmts_bind (tp, data);
1827 break;
1828
1829 case GOTO_EXPR:
1830 remove_useless_stmts_goto (tp, data);
1831 break;
1832
1833 case LABEL_EXPR:
1834 remove_useless_stmts_label (tp, data);
1835 break;
1836
1837 case RETURN_EXPR:
1838 fold_stmt (tp);
1839 data->last_goto = NULL;
1840 data->may_branch = true;
1841 break;
1842
1843 case CALL_EXPR:
1844 fold_stmt (tp);
1845 data->last_goto = NULL;
1846 notice_special_calls (t);
1847 update_call_expr_flags (t);
1848 if (tree_could_throw_p (t))
1849 data->may_throw = true;
1850 break;
1851
1852 case MODIFY_EXPR:
1853 data->last_goto = NULL;
1854 fold_stmt (tp);
1855 op = get_call_expr_in (t);
1856 if (op)
1857 {
1858 update_call_expr_flags (op);
1859 notice_special_calls (op);
1860 }
1861 if (tree_could_throw_p (t))
1862 data->may_throw = true;
1863 break;
1864
1865 case STATEMENT_LIST:
1866 {
1867 tree_stmt_iterator i = tsi_start (t);
1868 while (!tsi_end_p (i))
1869 {
1870 t = tsi_stmt (i);
1871 if (IS_EMPTY_STMT (t))
1872 {
1873 tsi_delink (&i);
1874 continue;
1875 }
1876
1877 remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
1878
1879 t = tsi_stmt (i);
1880 if (TREE_CODE (t) == STATEMENT_LIST)
1881 {
1882 tsi_link_before (&i, t, TSI_SAME_STMT);
1883 tsi_delink (&i);
1884 }
1885 else
1886 tsi_next (&i);
1887 }
1888 }
1889 break;
1890 case ASM_EXPR:
1891 fold_stmt (tp);
1892 data->last_goto = NULL;
1893 break;
1894
1895 default:
1896 data->last_goto = NULL;
1897 break;
1898 }
1899 }
1900
1901 static void
1902 remove_useless_stmts (void)
1903 {
1904 struct rus_data data;
1905
1906 clear_special_calls ();
1907
1908 do
1909 {
1910 memset (&data, 0, sizeof (data));
1911 remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
1912 }
1913 while (data.repeat);
1914 }
1915
1916
1917 struct tree_opt_pass pass_remove_useless_stmts =
1918 {
1919 "useless", /* name */
1920 NULL, /* gate */
1921 remove_useless_stmts, /* execute */
1922 NULL, /* sub */
1923 NULL, /* next */
1924 0, /* static_pass_number */
1925 0, /* tv_id */
1926 PROP_gimple_any, /* properties_required */
1927 0, /* properties_provided */
1928 0, /* properties_destroyed */
1929 0, /* todo_flags_start */
1930 TODO_dump_func, /* todo_flags_finish */
1931 0 /* letter */
1932 };
1933
1934 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
1935
1936 static void
1937 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
1938 {
1939 tree phi;
1940
1941 /* Since this block is no longer reachable, we can just delete all
1942 of its PHI nodes. */
1943 phi = phi_nodes (bb);
1944 while (phi)
1945 {
1946 tree next = PHI_CHAIN (phi);
1947 remove_phi_node (phi, NULL_TREE);
1948 phi = next;
1949 }
1950
1951 /* Remove edges to BB's successors. */
1952 while (EDGE_COUNT (bb->succs) > 0)
1953 remove_edge (EDGE_SUCC (bb, 0));
1954 }
1955
1956
1957 /* Remove statements of basic block BB. */
1958
1959 static void
1960 remove_bb (basic_block bb)
1961 {
1962 block_stmt_iterator i;
1963 #ifdef USE_MAPPED_LOCATION
1964 source_location loc = UNKNOWN_LOCATION;
1965 #else
1966 source_locus loc = 0;
1967 #endif
1968
1969 if (dump_file)
1970 {
1971 fprintf (dump_file, "Removing basic block %d\n", bb->index);
1972 if (dump_flags & TDF_DETAILS)
1973 {
1974 dump_bb (bb, dump_file, 0);
1975 fprintf (dump_file, "\n");
1976 }
1977 }
1978
1979 /* If we remove the header or the latch of a loop, mark the loop for
1980 removal by setting its header and latch to NULL. */
1981 if (current_loops)
1982 {
1983 struct loop *loop = bb->loop_father;
1984
1985 if (loop->latch == bb
1986 || loop->header == bb)
1987 {
1988 loop->latch = NULL;
1989 loop->header = NULL;
1990
1991 /* Also clean up the information associated with the loop. Updating
1992 it would waste time. More importantly, it may refer to ssa
1993 names that were defined in other removed basic blocks -- these
1994 ssa names are now removed and invalid. */
1995 free_numbers_of_iterations_estimates_loop (loop);
1996 }
1997 }
1998
1999 /* Remove all the instructions in the block. */
2000 for (i = bsi_start (bb); !bsi_end_p (i);)
2001 {
2002 tree stmt = bsi_stmt (i);
2003 if (TREE_CODE (stmt) == LABEL_EXPR
2004 && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
2005 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
2006 {
2007 basic_block new_bb;
2008 block_stmt_iterator new_bsi;
2009
2010 /* A non-reachable non-local label may still be referenced.
2011 But it no longer needs to carry the extra semantics of
2012 non-locality. */
2013 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
2014 {
2015 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
2016 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
2017 }
2018
2019 new_bb = bb->prev_bb;
2020 new_bsi = bsi_start (new_bb);
2021 bsi_remove (&i);
2022 bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
2023 }
2024 else
2025 {
2026 /* Release SSA definitions if we are in SSA. Note that we
2027 may be called when not in SSA. For example,
2028 final_cleanup calls this function via
2029 cleanup_tree_cfg. */
2030 if (in_ssa_p)
2031 release_defs (stmt);
2032
2033 bsi_remove (&i);
2034 }
2035
2036 /* Don't warn for removed gotos. Gotos are often removed due to
2037 jump threading, thus resulting in bogus warnings. Not great,
2038 since this way we lose warnings for gotos in the original
2039 program that are indeed unreachable. */
2040 if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
2041 {
2042 #ifdef USE_MAPPED_LOCATION
2043 if (EXPR_HAS_LOCATION (stmt))
2044 loc = EXPR_LOCATION (stmt);
2045 #else
2046 source_locus t;
2047 t = EXPR_LOCUS (stmt);
2048 if (t && LOCATION_LINE (*t) > 0)
2049 loc = t;
2050 #endif
2051 }
2052 }
2053
2054 /* If requested, give a warning that the first statement in the
2055 block is unreachable. The loop above walks forward but records
2056 only the first location it finds (and skips gotos), so LOC refers
2057 to the first locatable statement in the block. */
2058 #ifdef USE_MAPPED_LOCATION
2059 if (loc > BUILTINS_LOCATION)
2060 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2061 #else
2062 if (loc)
2063 warning (OPT_Wunreachable_code, "%Hwill never be executed", loc);
2064 #endif
2065
2066 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2067 }
2068
2069
2070 /* Given a basic block BB ending with a COND_EXPR, SWITCH_EXPR or computed
2071 GOTO_EXPR, and a predicate VAL, return the edge that will be taken out of the block.
2072 If VAL does not match a unique edge, NULL is returned. */
2073
2074 edge
2075 find_taken_edge (basic_block bb, tree val)
2076 {
2077 tree stmt;
2078
2079 stmt = last_stmt (bb);
2080
2081 gcc_assert (stmt);
2082 gcc_assert (is_ctrl_stmt (stmt));
2083 gcc_assert (val);
2084
2085 if (! is_gimple_min_invariant (val))
2086 return NULL;
2087
2088 if (TREE_CODE (stmt) == COND_EXPR)
2089 return find_taken_edge_cond_expr (bb, val);
2090
2091 if (TREE_CODE (stmt) == SWITCH_EXPR)
2092 return find_taken_edge_switch_expr (bb, val);
2093
2094 if (computed_goto_p (stmt))
2095 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2096
2097 gcc_unreachable ();
2098 }
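
/* A minimal usage sketch, not part of GCC: if BB is known to end in a
   COND_EXPR whose predicate has been proven to be zero, the surviving
   successor is the EDGE_FALSE_VALUE edge.  integer_zero_node is the
   shared INTEGER_CST for zero; the function name below is purely
   illustrative.  */

static edge
example_taken_edge_for_false_cond (basic_block bb)
{
  /* find_taken_edge dispatches to find_taken_edge_cond_expr, which
     returns the false edge for a zero value.  */
  return find_taken_edge (bb, integer_zero_node);
}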
2099
2100 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2101 statement, determine which of the outgoing edges will be taken out of the
2102 block. Return NULL if any edge may be taken. */
2103
2104 static edge
2105 find_taken_edge_computed_goto (basic_block bb, tree val)
2106 {
2107 basic_block dest;
2108 edge e = NULL;
2109
2110 dest = label_to_block (val);
2111 if (dest)
2112 {
2113 e = find_edge (bb, dest);
2114 gcc_assert (e != NULL);
2115 }
2116
2117 return e;
2118 }
2119
2120 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2121 statement, determine which of the two edges will be taken out of the
2122 block. Return NULL if either edge may be taken. */
2123
2124 static edge
2125 find_taken_edge_cond_expr (basic_block bb, tree val)
2126 {
2127 edge true_edge, false_edge;
2128
2129 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2130
2131 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2132 return (zero_p (val) ? false_edge : true_edge);
2133 }
2134
2135 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2136 statement, determine which edge will be taken out of the block. Return
2137 NULL if any edge may be taken. */
2138
2139 static edge
2140 find_taken_edge_switch_expr (basic_block bb, tree val)
2141 {
2142 tree switch_expr, taken_case;
2143 basic_block dest_bb;
2144 edge e;
2145
2146 switch_expr = last_stmt (bb);
2147 taken_case = find_case_label_for_value (switch_expr, val);
2148 dest_bb = label_to_block (CASE_LABEL (taken_case));
2149
2150 e = find_edge (bb, dest_bb);
2151 gcc_assert (e);
2152 return e;
2153 }
2154
2155
2156 /* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
2157 We can make optimal use here of the fact that the case labels are
2158 sorted: We can do a binary search for a case matching VAL. */
2159
2160 static tree
2161 find_case_label_for_value (tree switch_expr, tree val)
2162 {
2163 tree vec = SWITCH_LABELS (switch_expr);
2164 size_t low, high, n = TREE_VEC_LENGTH (vec);
2165 tree default_case = TREE_VEC_ELT (vec, n - 1);
2166
2167 for (low = -1, high = n - 1; high - low > 1; )
2168 {
2169 size_t i = (high + low) / 2;
2170 tree t = TREE_VEC_ELT (vec, i);
2171 int cmp;
2172
2173 /* Cache the result of comparing CASE_LOW and val. */
2174 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2175
2176 if (cmp > 0)
2177 high = i;
2178 else
2179 low = i;
2180
2181 if (CASE_HIGH (t) == NULL)
2182 {
2183 /* A single-valued case label. */
2184 if (cmp == 0)
2185 return t;
2186 }
2187 else
2188 {
2189 /* A case range. We can only handle integer ranges. */
2190 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2191 return t;
2192 }
2193 }
2194
2195 return default_case;
2196 }
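
/* The same binary search as find_case_label_for_value, sketched on a
   plain sorted array of integer ranges instead of a TREE_VEC of
   CASE_LABEL_EXPRs.  Illustrative only; the struct and names below are
   not part of GCC.  The last array element is assumed to be the default
   entry, mirroring the SWITCH_LABELS layout handled above.  size_t comes
   from the headers this file already includes (<stddef.h> if compiled
   standalone).  */

struct example_case_range
{
  long low, high;
  int has_high;   /* Zero for a single-valued case.  */
};

static size_t
example_find_case_index (const struct example_case_range *vec, size_t n,
                         long val)
{
  size_t low, high;

  /* As in the function above, LOW starts at (size_t) -1; the unsigned
     wraparound makes HIGH - LOW behave like a signed difference.  */
  for (low = -1, high = n - 1; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      int cmp = (vec[i].low > val) - (vec[i].low < val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (!vec[i].has_high)
        {
          /* A single-valued case label.  */
          if (cmp == 0)
            return i;
        }
      else if (cmp <= 0 && vec[i].high >= val)
        /* A case range containing VAL.  */
        return i;
    }

  return n - 1;   /* The default case.  */
}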
2197
2198
2199
2200
2201 /*---------------------------------------------------------------------------
2202 Debugging functions
2203 ---------------------------------------------------------------------------*/
2204
2205 /* Dump tree-specific information of block BB to file OUTF. */
2206
2207 void
2208 tree_dump_bb (basic_block bb, FILE *outf, int indent)
2209 {
2210 dump_generic_bb (outf, bb, indent, TDF_VOPS);
2211 }
2212
2213
2214 /* Dump a basic block on stderr. */
2215
2216 void
2217 debug_tree_bb (basic_block bb)
2218 {
2219 dump_bb (bb, stderr, 0);
2220 }
2221
2222
2223 /* Dump basic block with index N on stderr. */
2224
2225 basic_block
2226 debug_tree_bb_n (int n)
2227 {
2228 debug_tree_bb (BASIC_BLOCK (n));
2229 return BASIC_BLOCK (n);
2230 }
2231
2232
2233 /* Dump the CFG on stderr.
2234
2235 FLAGS are the same used by the tree dumping functions
2236 (see TDF_* in tree.h). */
2237
2238 void
2239 debug_tree_cfg (int flags)
2240 {
2241 dump_tree_cfg (stderr, flags);
2242 }
2243
2244
2245 /* Dump the program showing basic block boundaries on the given FILE.
2246
2247 FLAGS are the same used by the tree dumping functions (see TDF_* in
2248 tree.h). */
2249
2250 void
2251 dump_tree_cfg (FILE *file, int flags)
2252 {
2253 if (flags & TDF_DETAILS)
2254 {
2255 const char *funcname
2256 = lang_hooks.decl_printable_name (current_function_decl, 2);
2257
2258 fputc ('\n', file);
2259 fprintf (file, ";; Function %s\n\n", funcname);
2260 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2261 n_basic_blocks, n_edges, last_basic_block);
2262
2263 brief_dump_cfg (file);
2264 fprintf (file, "\n");
2265 }
2266
2267 if (flags & TDF_STATS)
2268 dump_cfg_stats (file);
2269
2270 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2271 }
2272
2273
2274 /* Dump CFG statistics on FILE. */
2275
2276 void
2277 dump_cfg_stats (FILE *file)
2278 {
2279 static long max_num_merged_labels = 0;
2280 unsigned long size, total = 0;
2281 long num_edges;
2282 basic_block bb;
2283 const char * const fmt_str = "%-30s%-13s%12s\n";
2284 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2285 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2286 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2287 const char *funcname
2288 = lang_hooks.decl_printable_name (current_function_decl, 2);
2289
2290
2291 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2292
2293 fprintf (file, "---------------------------------------------------------\n");
2294 fprintf (file, fmt_str, "", " Number of ", "Memory");
2295 fprintf (file, fmt_str, "", " instances ", "used ");
2296 fprintf (file, "---------------------------------------------------------\n");
2297
2298 size = n_basic_blocks * sizeof (struct basic_block_def);
2299 total += size;
2300 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2301 SCALE (size), LABEL (size));
2302
2303 num_edges = 0;
2304 FOR_EACH_BB (bb)
2305 num_edges += EDGE_COUNT (bb->succs);
2306 size = num_edges * sizeof (struct edge_def);
2307 total += size;
2308 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2309
2310 fprintf (file, "---------------------------------------------------------\n");
2311 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2312 LABEL (total));
2313 fprintf (file, "---------------------------------------------------------\n");
2314 fprintf (file, "\n");
2315
2316 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2317 max_num_merged_labels = cfg_stats.num_merged_labels;
2318
2319 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2320 cfg_stats.num_merged_labels, max_num_merged_labels);
2321
2322 fprintf (file, "\n");
2323 }
2324
2325
2326 /* Dump CFG statistics on stderr. Keep extern so that it's always
2327 linked in the final executable. */
2328
2329 void
2330 debug_cfg_stats (void)
2331 {
2332 dump_cfg_stats (stderr);
2333 }
2334
2335
2336 /* Dump the flowgraph to a .vcg FILE. */
2337
2338 static void
2339 tree_cfg2vcg (FILE *file)
2340 {
2341 edge e;
2342 edge_iterator ei;
2343 basic_block bb;
2344 const char *funcname
2345 = lang_hooks.decl_printable_name (current_function_decl, 2);
2346
2347 /* Write the file header. */
2348 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2349 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2350 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2351
2352 /* Write blocks and edges. */
2353 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2354 {
2355 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2356 e->dest->index);
2357
2358 if (e->flags & EDGE_FAKE)
2359 fprintf (file, " linestyle: dotted priority: 10");
2360 else
2361 fprintf (file, " linestyle: solid priority: 100");
2362
2363 fprintf (file, " }\n");
2364 }
2365 fputc ('\n', file);
2366
2367 FOR_EACH_BB (bb)
2368 {
2369 enum tree_code head_code, end_code;
2370 const char *head_name, *end_name;
2371 int head_line = 0;
2372 int end_line = 0;
2373 tree first = first_stmt (bb);
2374 tree last = last_stmt (bb);
2375
2376 if (first)
2377 {
2378 head_code = TREE_CODE (first);
2379 head_name = tree_code_name[head_code];
2380 head_line = get_lineno (first);
2381 }
2382 else
2383 head_name = "no-statement";
2384
2385 if (last)
2386 {
2387 end_code = TREE_CODE (last);
2388 end_name = tree_code_name[end_code];
2389 end_line = get_lineno (last);
2390 }
2391 else
2392 end_name = "no-statement";
2393
2394 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2395 bb->index, bb->index, head_name, head_line, end_name,
2396 end_line);
2397
2398 FOR_EACH_EDGE (e, ei, bb->succs)
2399 {
2400 if (e->dest == EXIT_BLOCK_PTR)
2401 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2402 else
2403 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2404
2405 if (e->flags & EDGE_FAKE)
2406 fprintf (file, " priority: 10 linestyle: dotted");
2407 else
2408 fprintf (file, " priority: 100 linestyle: solid");
2409
2410 fprintf (file, " }\n");
2411 }
2412
2413 if (bb->next_bb != EXIT_BLOCK_PTR)
2414 fputc ('\n', file);
2415 }
2416
2417 fputs ("}\n\n", file);
2418 }
2419
2420
2421
2422 /*---------------------------------------------------------------------------
2423 Miscellaneous helpers
2424 ---------------------------------------------------------------------------*/
2425
2426 /* Return true if T represents a stmt that always transfers control. */
2427
2428 bool
2429 is_ctrl_stmt (tree t)
2430 {
2431 return (TREE_CODE (t) == COND_EXPR
2432 || TREE_CODE (t) == SWITCH_EXPR
2433 || TREE_CODE (t) == GOTO_EXPR
2434 || TREE_CODE (t) == RETURN_EXPR
2435 || TREE_CODE (t) == RESX_EXPR);
2436 }
2437
2438
2439 /* Return true if T is a statement that may alter the flow of control
2440 (e.g., a call to a non-returning function). */
2441
2442 bool
2443 is_ctrl_altering_stmt (tree t)
2444 {
2445 tree call;
2446
2447 gcc_assert (t);
2448 call = get_call_expr_in (t);
2449 if (call)
2450 {
2451 /* A non-pure/const CALL_EXPR alters flow control if the current
2452 function has nonlocal labels. */
2453 if (TREE_SIDE_EFFECTS (call) && current_function_has_nonlocal_label)
2454 return true;
2455
2456 /* A CALL_EXPR also alters control flow if it does not return. */
2457 if (call_expr_flags (call) & ECF_NORETURN)
2458 return true;
2459 }
2460
2461 /* If a statement can throw, it alters control flow. */
2462 return tree_can_throw_internal (t);
2463 }
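
/* Illustrative only, not part of GCC: abort () is a noreturn function,
   so a call to it is not a control statement (is_ctrl_stmt) but is
   control-altering because call_expr_flags includes ECF_NORETURN, and
   stmt_ends_bb_p below therefore ends the block right after the call.
   The function is a hypothetical example; abort () is declared via the
   headers this file already includes.  */

static int
example_noreturn_call (int x)
{
  if (x < 0)
    abort ();   /* ECF_NORETURN: this call ends its basic block.  */
  return x;
}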
2464
2465
2466 /* Return true if T is a computed goto. */
2467
2468 bool
2469 computed_goto_p (tree t)
2470 {
2471 return (TREE_CODE (t) == GOTO_EXPR
2472 && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
2473 }
2474
2475
2476 /* Return true if EXPR is a simple local goto. */
2477
2478 bool
2479 simple_goto_p (tree expr)
2480 {
2481 return (TREE_CODE (expr) == GOTO_EXPR
2482 && TREE_CODE (GOTO_DESTINATION (expr)) == LABEL_DECL);
2483 }
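
/* Illustrative only, not part of GCC: in the hypothetical GNU C
   fragment below, the first goto has a LABEL_DECL destination and
   satisfies simple_goto_p, while "goto *dest" is a computed goto
   (computed_goto_p) because its GOTO_DESTINATION is an expression
   rather than a LABEL_DECL.  */

static void
example_goto_forms (void *dest)
{
  if (!dest)
    goto done;    /* Simple local goto.  */
  goto *dest;     /* Computed goto (GNU C extension).  */
 done:
  return;
}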
2484
2485
2486 /* Return true if T should start a new basic block. PREV_T is the
2487 statement preceding T. It is used when T is a label or a case label.
2488 Labels should only start a new basic block if their previous statement
2489 wasn't a label. Otherwise, a sequence of labels would generate
2490 unnecessary basic blocks that only contain a single label. */
2491
2492 static inline bool
2493 stmt_starts_bb_p (tree t, tree prev_t)
2494 {
2495 if (t == NULL_TREE)
2496 return false;
2497
2498 /* LABEL_EXPRs start a new basic block only if the preceding
2499 statement wasn't a label of the same type. This prevents the
2500 creation of consecutive blocks that have nothing but a single
2501 label. */
2502 if (TREE_CODE (t) == LABEL_EXPR)
2503 {
2504 /* Nonlocal and computed GOTO targets always start a new block. */
2505 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
2506 || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
2507 return true;
2508
2509 if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
2510 {
2511 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
2512 return true;
2513
2514 cfg_stats.num_merged_labels++;
2515 return false;
2516 }
2517 else
2518 return true;
2519 }
2520
2521 return false;
2522 }
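
/* Illustrative only, not part of GCC: when the hypothetical function
   below is gimplified, the two adjacent labels out1 and out2 reach
   stmt_starts_bb_p back to back; only the first starts a new basic
   block, and the second is merged into it and counted in
   cfg_stats.num_merged_labels.  */

static int
example_adjacent_labels (int x)
{
  if (x < 0)
    goto out1;
  if (x > 10)
    goto out2;
  return x;
 out1:
 out2:
  return 0;
}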
2523
2524
2525 /* Return true if T should end a basic block. */
2526
2527 bool
2528 stmt_ends_bb_p (tree t)
2529 {
2530 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2531 }
2532
2533
2534 /* Add gotos that used to be represented implicitly in the CFG. */
2535
2536 void
2537 disband_implicit_edges (void)
2538 {
2539 basic_block bb;
2540 block_stmt_iterator last;
2541 edge e;
2542 edge_iterator ei;
2543 tree stmt, label;
2544
2545 FOR_EACH_BB (bb)
2546 {
2547 last = bsi_last (bb);
2548 stmt = last_stmt (bb);
2549
2550 if (stmt && TREE_CODE (stmt) == COND_EXPR)
2551 {
2552 /* Remove superfluous gotos from COND_EXPR branches. Moved
2553 here from cfg_remove_useless_stmts since it violates the
2554 invariants for tree--cfg correspondence there, and it fits
2555 better here, where we do this anyway. */
2556 e = find_edge (bb, bb->next_bb);
2557 if (e)
2558 {
2559 if (e->flags & EDGE_TRUE_VALUE)
2560 COND_EXPR_THEN (stmt) = build_empty_stmt ();
2561 else if (e->flags & EDGE_FALSE_VALUE)
2562 COND_EXPR_ELSE (stmt) = build_empty_stmt ();
2563 else
2564 gcc_unreachable ();
2565 e->flags |= EDGE_FALLTHRU;
2566 }
2567
2568 continue;
2569 }
2570
2571 if (stmt && TREE_CODE (stmt) == RETURN_EXPR)
2572 {
2573 /* Remove the RETURN_EXPR if we may fall through to the exit
2574 instead. */
2575 gcc_assert (single_succ_p (bb));
2576 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2577
2578 if (bb->next_bb == EXIT_BLOCK_PTR
2579 && !TREE_OPERAND (stmt, 0))
2580 {
2581 bsi_remove (&last);
2582 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2583 }
2584 continue;
2585 }
2586
2587 /* There can be no fallthru edge if the last statement is a control
2588 one. */
2589 if (stmt && is_ctrl_stmt (stmt))
2590 continue;
2591
2592 /* Find a fallthru edge and emit the goto if necessary. */
2593 FOR_EACH_EDGE (e, ei, bb->succs)
2594 if (e->flags & EDGE_FALLTHRU)
2595 break;
2596
2597 if (!e || e->dest == bb->next_bb)
2598 continue;
2599
2600 gcc_assert (e->dest != EXIT_BLOCK_PTR);
2601 label = tree_block_label (e->dest);
2602
2603 stmt = build1 (GOTO_EXPR, void_type_node, label);
2604 #ifdef USE_MAPPED_LOCATION
2605 SET_EXPR_LOCATION (stmt, e->goto_locus);
2606 #else
2607 SET_EXPR_LOCUS (stmt, e->goto_locus);
2608 #endif
2609 bsi_insert_after (&last, stmt, BSI_NEW_STMT);
2610 e->flags &= ~EDGE_FALLTHRU;
2611 }
2612 }
2613
2614 /* Remove block annotations and other data structures. */
2615
2616 void
2617 delete_tree_cfg_annotations (void)
2618 {
2619 label_to_block_map = NULL;
2620 }
2621
2622
2623 /* Return the first statement in basic block BB. */
2624
2625 tree
2626 first_stmt (basic_block bb)
2627 {
2628 block_stmt_iterator i = bsi_start (bb);
2629 return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
2630 }
2631
2632
2633 /* Return the last statement in basic block BB. */
2634
2635 tree
2636 last_stmt (basic_block bb)
2637 {
2638 block_stmt_iterator b = bsi_last (bb);
2639 return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
2640 }
2641
2642
2643 /* Return a pointer to the last statement in block BB. */
2644
2645 tree *
2646 last_stmt_ptr (basic_block bb)
2647 {
2648 block_stmt_iterator last = bsi_last (bb);
2649 return !bsi_end_p (last) ? bsi_stmt_ptr (last) : NULL;
2650 }
2651
2652
2653 /* Return the last statement of an otherwise empty block. Return NULL
2654 if the block is totally empty, or if it contains more than one
2655 statement. */
2656
2657 tree
2658 last_and_only_stmt (basic_block bb)
2659 {
2660 block_stmt_iterator i = bsi_last (bb);
2661 tree last, prev;
2662
2663 if (bsi_end_p (i))
2664 return NULL_TREE;
2665
2666 last = bsi_stmt (i);
2667 bsi_prev (&i);
2668 if (bsi_end_p (i))
2669 return last;
2670
2671 /* Empty statements should no longer appear in the instruction stream.
2672 Everything that might have appeared before should be deleted by
2673 remove_useless_stmts, and the optimizers should just bsi_remove
2674 instead of smashing with build_empty_stmt.
2675
2676 Thus the only thing that should appear here in a block containing
2677 one executable statement is a label. */
2678 prev = bsi_stmt (i);
2679 if (TREE_CODE (prev) == LABEL_EXPR)
2680 return last;
2681 else
2682 return NULL_TREE;
2683 }
2684
2685
2686 /* Mark BB as the basic block holding statement T. */
2687
2688 void
2689 set_bb_for_stmt (tree t, basic_block bb)
2690 {
2691 if (TREE_CODE (t) == PHI_NODE)
2692 PHI_BB (t) = bb;
2693 else if (TREE_CODE (t) == STATEMENT_LIST)
2694 {
2695 tree_stmt_iterator i;
2696 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2697 set_bb_for_stmt (tsi_stmt (i), bb);
2698 }
2699 else
2700 {
2701 stmt_ann_t ann = get_stmt_ann (t);
2702 ann->bb = bb;
2703
2704 /* If the statement is a label, add the label to block-to-labels map
2705 so that we can speed up edge creation for GOTO_EXPRs. */
2706 if (TREE_CODE (t) == LABEL_EXPR)
2707 {
2708 int uid;
2709
2710 t = LABEL_EXPR_LABEL (t);
2711 uid = LABEL_DECL_UID (t);
2712 if (uid == -1)
2713 {
2714 LABEL_DECL_UID (t) = uid = cfun->last_label_uid++;
2715 if (VARRAY_SIZE (label_to_block_map) <= (unsigned) uid)
2716 VARRAY_GROW (label_to_block_map, 3 * uid / 2);
2717 }
2718 else
2719 /* We're moving an existing label. Make sure that we've
2720 removed it from the old block. */
2721 gcc_assert (!bb || !VARRAY_BB (label_to_block_map, uid));
2722 VARRAY_BB (label_to_block_map, uid) = bb;
2723 }
2724 }
2725 }
2726
2727 /* Finds iterator for STMT. */
2728
2729 extern block_stmt_iterator
2730 bsi_for_stmt (tree stmt)
2731 {
2732 block_stmt_iterator bsi;
2733
2734 for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
2735 if (bsi_stmt (bsi) == stmt)
2736 return bsi;
2737
2738 gcc_unreachable ();
2739 }
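
/* A minimal usage sketch, not part of GCC: locate the iterator for a
   statement that is already linked into a basic block and insert a new
   statement right after it.  Both arguments are assumed to be valid
   GIMPLE statements; the function name is illustrative.  */

static void
example_insert_after_stmt (tree stmt, tree new_stmt)
{
  block_stmt_iterator bsi = bsi_for_stmt (stmt);

  /* BSI_SAME_STMT leaves the iterator pointing at STMT.  */
  bsi_insert_after (&bsi, new_stmt, BSI_SAME_STMT);
}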
2740
2741 /* Mark statement T as modified, and update it. */
2742 static inline void
2743 update_modified_stmts (tree t)
2744 {
2745 if (TREE_CODE (t) == STATEMENT_LIST)
2746 {
2747 tree_stmt_iterator i;
2748 tree stmt;
2749 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
2750 {
2751 stmt = tsi_stmt (i);
2752 update_stmt_if_modified (stmt);
2753 }
2754 }
2755 else
2756 update_stmt_if_modified (t);
2757 }
2758
2759 /* Insert statement (or statement list) T before the statement
2760 pointed-to by iterator I. M specifies how to update iterator I
2761 after insertion (see enum bsi_iterator_update). */
2762
2763 void
2764 bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2765 {
2766 set_bb_for_stmt (t, i->bb);
2767 update_modified_stmts (t);
2768 tsi_link_before (&i->tsi, t, m);
2769 }
2770
2771
2772 /* Insert statement (or statement list) T after the statement
2773 pointed-to by iterator I. M specifies how to update iterator I
2774 after insertion (see enum bsi_iterator_update). */
2775
2776 void
2777 bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
2778 {
2779 set_bb_for_stmt (t, i->bb);
2780 update_modified_stmts (t);
2781 tsi_link_after (&i->tsi, t, m);
2782 }
2783
2784
2785 /* Remove the statement pointed to by iterator I. The iterator is updated
2786 to the next statement. */
2787
2788 void
2789 bsi_remove (block_stmt_iterator *i)
2790 {
2791 tree t = bsi_stmt (*i);
2792 set_bb_for_stmt (t, NULL);
2793 delink_stmt_imm_use (t);
2794 tsi_delink (&i->tsi);
2795 mark_stmt_modified (t);
2796 }
2797
2798
2799 /* Move the statement at FROM so it comes right after the statement at TO. */
2800
2801 void
2802 bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
2803 {
2804 tree stmt = bsi_stmt (*from);
2805 bsi_remove (from);
2806 bsi_insert_after (to, stmt, BSI_SAME_STMT);
2807 }
2808
2809
2810 /* Move the statement at FROM so it comes right before the statement at TO. */
2811
2812 void
2813 bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
2814 {
2815 tree stmt = bsi_stmt (*from);
2816 bsi_remove (from);
2817 bsi_insert_before (to, stmt, BSI_SAME_STMT);
2818 }
2819
2820
2821 /* Move the statement at FROM to the end of basic block BB. */
2822
2823 void
2824 bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
2825 {
2826 block_stmt_iterator last = bsi_last (bb);
2827
2828 /* Have to check bsi_end_p because it could be an empty block. */
2829 if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
2830 bsi_move_before (from, &last);
2831 else
2832 bsi_move_after (from, &last);
2833 }
2834
2835
2836 /* Replace the contents of the statement pointed to by iterator BSI
2837 with STMT. If PRESERVE_EH_INFO is true, the exception handling
2838 information of the original statement is preserved. */
2839
2840 void
2841 bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool preserve_eh_info)
2842 {
2843 int eh_region;
2844 tree orig_stmt = bsi_stmt (*bsi);
2845
2846 SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
2847 set_bb_for_stmt (stmt, bsi->bb);
2848
2849 /* Preserve EH region information from the original statement, if
2850 requested by the caller. */
2851 if (preserve_eh_info)
2852 {
2853 eh_region = lookup_stmt_eh_region (orig_stmt);
2854 if (eh_region >= 0)
2855 add_stmt_to_eh_region (stmt, eh_region);
2856 }
2857
2858 delink_stmt_imm_use (orig_stmt);
2859 *bsi_stmt_ptr (*bsi) = stmt;
2860 mark_stmt_modified (stmt);
2861 update_modified_stmts (stmt);
2862 }
2863
2864
2865 /* Insert the statement pointed-to by BSI into edge E. Every attempt
2866 is made to place the statement in an existing basic block, but
2867 sometimes that isn't possible. When it isn't possible, the edge is
2868 split and the statement is added to the new block.
2869
2870 In all cases, the returned *BSI points to the correct location. The
2871 return value is true if insertion should be done after the location,
2872 or false if it should be done before the location. If a new basic block
2873 has to be created, it is stored in *NEW_BB. */
2874
2875 static bool
2876 tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
2877 basic_block *new_bb)
2878 {
2879 basic_block dest, src;
2880 tree tmp;
2881
2882 dest = e->dest;
2883 restart:
2884
2885 /* If the destination has one predecessor which has no PHI nodes,
2886 insert there. Except for the exit block.
2887
2888 The requirement for no PHI nodes could be relaxed. Basically we
2889 would have to examine the PHIs to prove that none of them used
2890 the value set by the statement we want to insert on E. That
2891 hardly seems worth the effort. */
2892 if (single_pred_p (dest)
2893 && ! phi_nodes (dest)
2894 && dest != EXIT_BLOCK_PTR)
2895 {
2896 *bsi = bsi_start (dest);
2897 if (bsi_end_p (*bsi))
2898 return true;
2899
2900 /* Make sure we insert after any leading labels. */
2901 tmp = bsi_stmt (*bsi);
2902 while (TREE_CODE (tmp) == LABEL_EXPR)
2903 {
2904 bsi_next (bsi);
2905 if (bsi_end_p (*bsi))
2906 break;
2907 tmp = bsi_stmt (*bsi);
2908 }
2909
2910 if (bsi_end_p (*bsi))
2911 {
2912 *bsi = bsi_last (dest);
2913 return true;
2914 }
2915 else
2916 return false;
2917 }
2918
2919 /* If the source has one successor, the edge is not abnormal and
2920 the last statement does not end a basic block, insert there.
2921 Except for the entry block. */
2922 src = e->src;
2923 if ((e->flags & EDGE_ABNORMAL) == 0
2924 && single_succ_p (src)
2925 && src != ENTRY_BLOCK_PTR)
2926 {
2927 *bsi = bsi_last (src);
2928 if (bsi_end_p (*bsi))
2929 return true;
2930
2931 tmp = bsi_stmt (*bsi);
2932 if (!stmt_ends_bb_p (tmp))
2933 return true;
2934
2935 /* Insert code just before returning the value. We may need to decompose
2936 the return if it contains a non-trivial operand. */
2937 if (TREE_CODE (tmp) == RETURN_EXPR)
2938 {
2939 tree op = TREE_OPERAND (tmp, 0);
2940 if (op && !is_gimple_val (op))
2941 {
2942 gcc_assert (TREE_CODE (op) == MODIFY_EXPR);
2943 bsi_insert_before (bsi, op, BSI_NEW_STMT);
2944 TREE_OPERAND (tmp, 0) = TREE_OPERAND (op, 0);
2945 }
2946 bsi_prev (bsi);
2947 return true;
2948 }
2949 }
2950
2951 /* Otherwise, create a new basic block, and split this edge. */
2952 dest = split_edge (e);
2953 if (new_bb)
2954 *new_bb = dest;
2955 e = single_pred_edge (dest);
2956 goto restart;
2957 }
2958
2959
2960 /* This routine will commit all pending edge insertions, creating any new
2961 basic blocks which are necessary. */
2962
2963 void
2964 bsi_commit_edge_inserts (void)
2965 {
2966 basic_block bb;
2967 edge e;
2968 edge_iterator ei;
2969
2970 bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
2971
2972 FOR_EACH_BB (bb)
2973 FOR_EACH_EDGE (e, ei, bb->succs)
2974 bsi_commit_one_edge_insert (e, NULL);
2975 }
2976
2977
2978 /* Commit insertions pending at edge E. If a new block is created, set NEW_BB
2979 to this block, otherwise set it to NULL. */
2980
2981 void
2982 bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
2983 {
2984 if (new_bb)
2985 *new_bb = NULL;
2986 if (PENDING_STMT (e))
2987 {
2988 block_stmt_iterator bsi;
2989 tree stmt = PENDING_STMT (e);
2990
2991 PENDING_STMT (e) = NULL_TREE;
2992
2993 if (tree_find_edge_insert_loc (e, &bsi, new_bb))
2994 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2995 else
2996 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2997 }
2998 }
2999
3000
3001 /* Add STMT to the pending list of edge E. No actual insertion is
3002 made until a call to bsi_commit_edge_inserts () is made. */
3003
3004 void
3005 bsi_insert_on_edge (edge e, tree stmt)
3006 {
3007 append_to_statement_list (stmt, &PENDING_STMT (e));
3008 }
3009
3010 /* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts. If a new
3011 block has to be created, it is returned. */
3012
3013 basic_block
3014 bsi_insert_on_edge_immediate (edge e, tree stmt)
3015 {
3016 block_stmt_iterator bsi;
3017 basic_block new_bb = NULL;
3018
3019 gcc_assert (!PENDING_STMT (e));
3020
3021 if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
3022 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
3023 else
3024 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
3025
3026 return new_bb;
3027 }
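
/* A minimal usage sketch, not part of GCC: queue a copy of NEW_STMT on
   every outgoing edge of BB and materialize all pending edge
   insertions at once.  unshare_expr is used because tree statements
   generally cannot be shared (see verify_node_sharing below); the
   function name is illustrative.  */

static void
example_insert_on_all_succs (basic_block bb, tree new_stmt)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    bsi_insert_on_edge (e, unshare_expr (new_stmt));

  /* Splits edges and creates new basic blocks where needed.  */
  bsi_commit_edge_inserts ();
}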
3028
3029 /*---------------------------------------------------------------------------
3030 Tree specific functions for CFG manipulation
3031 ---------------------------------------------------------------------------*/
3032
3033 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
3034
3035 static void
3036 reinstall_phi_args (edge new_edge, edge old_edge)
3037 {
3038 tree var, phi;
3039
3040 if (!PENDING_STMT (old_edge))
3041 return;
3042
3043 for (var = PENDING_STMT (old_edge), phi = phi_nodes (new_edge->dest);
3044 var && phi;
3045 var = TREE_CHAIN (var), phi = PHI_CHAIN (phi))
3046 {
3047 tree result = TREE_PURPOSE (var);
3048 tree arg = TREE_VALUE (var);
3049
3050 gcc_assert (result == PHI_RESULT (phi));
3051
3052 add_phi_arg (phi, arg, new_edge);
3053 }
3054
3055 PENDING_STMT (old_edge) = NULL;
3056 }
3057
3058 /* Returns the basic block after which the new basic block created
3059 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3060 near its "logical" location. This is of most help to humans looking
3061 at debugging dumps. */
3062
3063 static basic_block
3064 split_edge_bb_loc (edge edge_in)
3065 {
3066 basic_block dest = edge_in->dest;
3067
3068 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
3069 return edge_in->src;
3070 else
3071 return dest->prev_bb;
3072 }
3073
3074 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3075 Abort on abnormal edges. */
3076
3077 static basic_block
3078 tree_split_edge (edge edge_in)
3079 {
3080 basic_block new_bb, after_bb, dest, src;
3081 edge new_edge, e;
3082
3083 /* Abnormal edges cannot be split. */
3084 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3085
3086 src = edge_in->src;
3087 dest = edge_in->dest;
3088
3089 after_bb = split_edge_bb_loc (edge_in);
3090
3091 new_bb = create_empty_bb (after_bb);
3092 new_bb->frequency = EDGE_FREQUENCY (edge_in);
3093 new_bb->count = edge_in->count;
3094 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
3095 new_edge->probability = REG_BR_PROB_BASE;
3096 new_edge->count = edge_in->count;
3097
3098 e = redirect_edge_and_branch (edge_in, new_bb);
3099 gcc_assert (e);
3100 reinstall_phi_args (new_edge, e);
3101
3102 return new_bb;
3103 }
3104
3105
3106 /* Return true when BB has label LABEL in it. */
3107
3108 static bool
3109 has_label_p (basic_block bb, tree label)
3110 {
3111 block_stmt_iterator bsi;
3112
3113 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3114 {
3115 tree stmt = bsi_stmt (bsi);
3116
3117 if (TREE_CODE (stmt) != LABEL_EXPR)
3118 return false;
3119 if (LABEL_EXPR_LABEL (stmt) == label)
3120 return true;
3121 }
3122 return false;
3123 }
3124
3125
3126 /* Callback for walk_tree: check that all elements whose address is
3127 taken are properly marked as such. DATA is nonzero if *TP was
3128 seen inside a PHI node. */
3129
3130 static tree
3131 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3132 {
3133 tree t = *tp, x;
3134 bool in_phi = (data != NULL);
3135
3136 if (TYPE_P (t))
3137 *walk_subtrees = 0;
3138
3139 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3140 #define CHECK_OP(N, MSG) \
3141 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3142 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3143
3144 switch (TREE_CODE (t))
3145 {
3146 case SSA_NAME:
3147 if (SSA_NAME_IN_FREE_LIST (t))
3148 {
3149 error ("SSA name in freelist but still referenced");
3150 return *tp;
3151 }
3152 break;
3153
3154 case ASSERT_EXPR:
3155 x = fold (ASSERT_EXPR_COND (t));
3156 if (x == boolean_false_node)
3157 {
3158 error ("ASSERT_EXPR with an always-false condition");
3159 return *tp;
3160 }
3161 break;
3162
3163 case MODIFY_EXPR:
3164 x = TREE_OPERAND (t, 0);
3165 if (TREE_CODE (x) == BIT_FIELD_REF
3166 && is_gimple_reg (TREE_OPERAND (x, 0)))
3167 {
3168 error ("GIMPLE register modified with BIT_FIELD_REF");
3169 return t;
3170 }
3171 break;
3172
3173 case ADDR_EXPR:
3174 {
3175 bool old_invariant;
3176 bool old_constant;
3177 bool old_side_effects;
3178 bool new_invariant;
3179 bool new_constant;
3180 bool new_side_effects;
3181
3182 /* ??? tree-ssa-alias.c may have overlooked dead PHI nodes, missing
3183 dead PHIs that take the address of something. But if the PHI
3184 result is dead, the fact that it takes the address of anything
3185 is irrelevant. Because we cannot tell from here if a PHI result
3186 is dead, we just skip this check for PHIs altogether. This means
3187 we may be missing "valid" checks, but what can you do?
3188 This was PR19217. */
3189 if (in_phi)
3190 break;
3191
3192 old_invariant = TREE_INVARIANT (t);
3193 old_constant = TREE_CONSTANT (t);
3194 old_side_effects = TREE_SIDE_EFFECTS (t);
3195
3196 recompute_tree_invarant_for_addr_expr (t);
3197 new_invariant = TREE_INVARIANT (t);
3198 new_side_effects = TREE_SIDE_EFFECTS (t);
3199 new_constant = TREE_CONSTANT (t);
3200
3201 if (old_invariant != new_invariant)
3202 {
3203 error ("invariant not recomputed when ADDR_EXPR changed");
3204 return t;
3205 }
3206
3207 if (old_constant != new_constant)
3208 {
3209 error ("constant not recomputed when ADDR_EXPR changed");
3210 return t;
3211 }
3212 if (old_side_effects != new_side_effects)
3213 {
3214 error ("side effects not recomputed when ADDR_EXPR changed");
3215 return t;
3216 }
3217
3218 /* Skip any references (they will be checked when we recurse down the
3219 tree) and ensure that any variable used as a prefix is marked
3220 addressable. */
3221 for (x = TREE_OPERAND (t, 0);
3222 handled_component_p (x);
3223 x = TREE_OPERAND (x, 0))
3224 ;
3225
3226 if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
3227 return NULL;
3228 if (!TREE_ADDRESSABLE (x))
3229 {
3230 error ("address taken, but ADDRESSABLE bit not set");
3231 return x;
3232 }
3233 break;
3234 }
3235
3236 case COND_EXPR:
3237 x = COND_EXPR_COND (t);
3238 if (TREE_CODE (TREE_TYPE (x)) != BOOLEAN_TYPE)
3239 {
3240 error ("non-boolean used in condition");
3241 return x;
3242 }
3243 if (!is_gimple_condexpr (x))
3244 {
3245 error ("invalid conditional operand");
3246 return x;
3247 }
3248 break;
3249
3250 case NOP_EXPR:
3251 case CONVERT_EXPR:
3252 case FIX_TRUNC_EXPR:
3253 case FIX_CEIL_EXPR:
3254 case FIX_FLOOR_EXPR:
3255 case FIX_ROUND_EXPR:
3256 case FLOAT_EXPR:
3257 case NEGATE_EXPR:
3258 case ABS_EXPR:
3259 case BIT_NOT_EXPR:
3260 case NON_LVALUE_EXPR:
3261 case TRUTH_NOT_EXPR:
3262 CHECK_OP (0, "invalid operand to unary operator");
3263 break;
3264
3265 case REALPART_EXPR:
3266 case IMAGPART_EXPR:
3267 case COMPONENT_REF:
3268 case ARRAY_REF:
3269 case ARRAY_RANGE_REF:
3270 case BIT_FIELD_REF:
3271 case VIEW_CONVERT_EXPR:
3272 /* We have a nest of references. Verify that each of the operands
3273 that determine where to reference is either a constant or a variable,
3274 verify that the base is valid, and then show we've already checked
3275 the subtrees. */
3276 while (handled_component_p (t))
3277 {
3278 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3279 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3280 else if (TREE_CODE (t) == ARRAY_REF
3281 || TREE_CODE (t) == ARRAY_RANGE_REF)
3282 {
3283 CHECK_OP (1, "invalid array index");
3284 if (TREE_OPERAND (t, 2))
3285 CHECK_OP (2, "invalid array lower bound");
3286 if (TREE_OPERAND (t, 3))
3287 CHECK_OP (3, "invalid array stride");
3288 }
3289 else if (TREE_CODE (t) == BIT_FIELD_REF)
3290 {
3291 CHECK_OP (1, "invalid operand to BIT_FIELD_REF");
3292 CHECK_OP (2, "invalid operand to BIT_FIELD_REF");
3293 }
3294
3295 t = TREE_OPERAND (t, 0);
3296 }
3297
3298 if (!CONSTANT_CLASS_P (t) && !is_gimple_lvalue (t))
3299 {
3300 error ("invalid reference prefix");
3301 return t;
3302 }
3303 *walk_subtrees = 0;
3304 break;
3305
3306 case LT_EXPR:
3307 case LE_EXPR:
3308 case GT_EXPR:
3309 case GE_EXPR:
3310 case EQ_EXPR:
3311 case NE_EXPR:
3312 case UNORDERED_EXPR:
3313 case ORDERED_EXPR:
3314 case UNLT_EXPR:
3315 case UNLE_EXPR:
3316 case UNGT_EXPR:
3317 case UNGE_EXPR:
3318 case UNEQ_EXPR:
3319 case LTGT_EXPR:
3320 case PLUS_EXPR:
3321 case MINUS_EXPR:
3322 case MULT_EXPR:
3323 case TRUNC_DIV_EXPR:
3324 case CEIL_DIV_EXPR:
3325 case FLOOR_DIV_EXPR:
3326 case ROUND_DIV_EXPR:
3327 case TRUNC_MOD_EXPR:
3328 case CEIL_MOD_EXPR:
3329 case FLOOR_MOD_EXPR:
3330 case ROUND_MOD_EXPR:
3331 case RDIV_EXPR:
3332 case EXACT_DIV_EXPR:
3333 case MIN_EXPR:
3334 case MAX_EXPR:
3335 case LSHIFT_EXPR:
3336 case RSHIFT_EXPR:
3337 case LROTATE_EXPR:
3338 case RROTATE_EXPR:
3339 case BIT_IOR_EXPR:
3340 case BIT_XOR_EXPR:
3341 case BIT_AND_EXPR:
3342 CHECK_OP (0, "invalid operand to binary operator");
3343 CHECK_OP (1, "invalid operand to binary operator");
3344 break;
3345
3346 default:
3347 break;
3348 }
3349 return NULL;
3350
3351 #undef CHECK_OP
3352 }
3353
3354
3355 /* Verify STMT, return true if STMT is not in GIMPLE form.
3356 TODO: Implement type checking. */
3357
3358 static bool
3359 verify_stmt (tree stmt, bool last_in_block)
3360 {
3361 tree addr;
3362
3363 if (!is_gimple_stmt (stmt))
3364 {
3365 error ("is not a valid GIMPLE statement");
3366 goto fail;
3367 }
3368
3369 addr = walk_tree (&stmt, verify_expr, NULL, NULL);
3370 if (addr)
3371 {
3372 debug_generic_stmt (addr);
3373 return true;
3374 }
3375
3376 /* If the statement is marked as part of an EH region, then it is
3377 expected that the statement could throw. Verify that when we
3378 have optimizations that simplify statements such that we prove
3379 that they cannot throw, that we update other data structures
3380 to match. */
3381 if (lookup_stmt_eh_region (stmt) >= 0)
3382 {
3383 if (!tree_could_throw_p (stmt))
3384 {
3385 error ("statement marked for throw, but doesn%'t");
3386 goto fail;
3387 }
3388 if (!last_in_block && tree_can_throw_internal (stmt))
3389 {
3390 error ("statement marked for throw in middle of block");
3391 goto fail;
3392 }
3393 }
3394
3395 return false;
3396
3397 fail:
3398 debug_generic_stmt (stmt);
3399 return true;
3400 }
3401
3402
3403 /* Return true when T can be shared. */
3404
3405 static bool
3406 tree_node_can_be_shared (tree t)
3407 {
3408 if (IS_TYPE_OR_DECL_P (t)
3409 /* We check for constants explicitly since they are not considered
3410 gimple invariants if they overflowed. */
3411 || CONSTANT_CLASS_P (t)
3412 || is_gimple_min_invariant (t)
3413 || TREE_CODE (t) == SSA_NAME
3414 || t == error_mark_node)
3415 return true;
3416
3417 if (TREE_CODE (t) == CASE_LABEL_EXPR)
3418 return true;
3419
3420 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3421 /* We check for constants explicitly since they are not considered
3422 gimple invariants if they overflowed. */
3423 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 1))
3424 || is_gimple_min_invariant (TREE_OPERAND (t, 1))))
3425 || (TREE_CODE (t) == COMPONENT_REF
3426 || TREE_CODE (t) == REALPART_EXPR
3427 || TREE_CODE (t) == IMAGPART_EXPR))
3428 t = TREE_OPERAND (t, 0);
3429
3430 if (DECL_P (t))
3431 return true;
3432
3433 return false;
3434 }
3435
3436
3437 /* Called via walk_tree. Verify tree sharing. */
3438
3439 static tree
3440 verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
3441 {
3442 htab_t htab = (htab_t) data;
3443 void **slot;
3444
3445 if (tree_node_can_be_shared (*tp))
3446 {
3447 *walk_subtrees = false;
3448 return NULL;
3449 }
3450
3451 slot = htab_find_slot (htab, *tp, INSERT);
3452 if (*slot)
3453 return *slot;
3454 *slot = *tp;
3455
3456 return NULL;
3457 }
3458
3459
3460 /* Verify the GIMPLE statement chain. */
3461
3462 void
3463 verify_stmts (void)
3464 {
3465 basic_block bb;
3466 block_stmt_iterator bsi;
3467 bool err = false;
3468 htab_t htab;
3469 tree addr;
3470
3471 timevar_push (TV_TREE_STMT_VERIFY);
3472 htab = htab_create (37, htab_hash_pointer, htab_eq_pointer, NULL);
3473
3474 FOR_EACH_BB (bb)
3475 {
3476 tree phi;
3477 int i;
3478
3479 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3480 {
3481 int phi_num_args = PHI_NUM_ARGS (phi);
3482
3483 if (bb_for_stmt (phi) != bb)
3484 {
3485 error ("bb_for_stmt (phi) is set to a wrong basic block");
3486 err |= true;
3487 }
3488
3489 for (i = 0; i < phi_num_args; i++)
3490 {
3491 tree t = PHI_ARG_DEF (phi, i);
3492 tree addr;
3493
3494 /* Addressable variables do have SSA_NAMEs but they
3495 are not considered gimple values. */
3496 if (TREE_CODE (t) != SSA_NAME
3497 && TREE_CODE (t) != FUNCTION_DECL
3498 && !is_gimple_val (t))
3499 {
3500 error ("PHI def is not a GIMPLE value");
3501 debug_generic_stmt (phi);
3502 debug_generic_stmt (t);
3503 err |= true;
3504 }
3505
3506 addr = walk_tree (&t, verify_expr, (void *) 1, NULL);
3507 if (addr)
3508 {
3509 debug_generic_stmt (addr);
3510 err |= true;
3511 }
3512
3513 addr = walk_tree (&t, verify_node_sharing, htab, NULL);
3514 if (addr)
3515 {
3516 error ("incorrect sharing of tree nodes");
3517 debug_generic_stmt (phi);
3518 debug_generic_stmt (addr);
3519 err |= true;
3520 }
3521 }
3522 }
3523
3524 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
3525 {
3526 tree stmt = bsi_stmt (bsi);
3527
3528 if (bb_for_stmt (stmt) != bb)
3529 {
3530 error ("bb_for_stmt (stmt) is set to a wrong basic block");
3531 err |= true;
3532 }
3533
3534 bsi_next (&bsi);
3535 err |= verify_stmt (stmt, bsi_end_p (bsi));
3536 addr = walk_tree (&stmt, verify_node_sharing, htab, NULL);
3537 if (addr)
3538 {
3539 error ("incorrect sharing of tree nodes");
3540 debug_generic_stmt (stmt);
3541 debug_generic_stmt (addr);
3542 err |= true;
3543 }
3544 }
3545 }
3546
3547 if (err)
3548 internal_error ("verify_stmts failed");
3549
3550 htab_delete (htab);
3551 timevar_pop (TV_TREE_STMT_VERIFY);
3552 }
3553
3554
3555 /* Verifies that the flow information is OK. */
3556
3557 static int
3558 tree_verify_flow_info (void)
3559 {
3560 int err = 0;
3561 basic_block bb;
3562 block_stmt_iterator bsi;
3563 tree stmt;
3564 edge e;
3565 edge_iterator ei;
3566
3567 if (ENTRY_BLOCK_PTR->stmt_list)
3568 {
3569 error ("ENTRY_BLOCK has a statement list associated with it");
3570 err = 1;
3571 }
3572
3573 if (EXIT_BLOCK_PTR->stmt_list)
3574 {
3575 error ("EXIT_BLOCK has a statement list associated with it");
3576 err = 1;
3577 }
3578
3579 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3580 if (e->flags & EDGE_FALLTHRU)
3581 {
3582 error ("fallthru to exit from bb %d", e->src->index);
3583 err = 1;
3584 }
3585
3586 FOR_EACH_BB (bb)
3587 {
3588 bool found_ctrl_stmt = false;
3589
3590 stmt = NULL_TREE;
3591
3592 /* Skip labels at the start of the basic block. */
3593 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
3594 {
3595 tree prev_stmt = stmt;
3596
3597 stmt = bsi_stmt (bsi);
3598
3599 if (TREE_CODE (stmt) != LABEL_EXPR)
3600 break;
3601
3602 if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
3603 {
3604 error ("nonlocal label %s is not first "
3605 "in a sequence of labels in bb %d",
3606 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3607 bb->index);
3608 err = 1;
3609 }
3610
3611 if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
3612 {
3613 error ("label %s to block does not match in bb %d",
3614 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3615 bb->index);
3616 err = 1;
3617 }
3618
3619 if (decl_function_context (LABEL_EXPR_LABEL (stmt))
3620 != current_function_decl)
3621 {
3622 error ("label %s has incorrect context in bb %d",
3623 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3624 bb->index);
3625 err = 1;
3626 }
3627 }
3628
3629 /* Verify that the body of basic block BB is free of control flow. */
3630 for (; !bsi_end_p (bsi); bsi_next (&bsi))
3631 {
3632 tree stmt = bsi_stmt (bsi);
3633
3634 if (found_ctrl_stmt)
3635 {
3636 error ("control flow in the middle of basic block %d",
3637 bb->index);
3638 err = 1;
3639 }
3640
3641 if (stmt_ends_bb_p (stmt))
3642 found_ctrl_stmt = true;
3643
3644 if (TREE_CODE (stmt) == LABEL_EXPR)
3645 {
3646 error ("label %s in the middle of basic block %d",
3647 IDENTIFIER_POINTER (DECL_NAME (LABEL_EXPR_LABEL (stmt))),
3648 bb->index);
3649 err = 1;
3650 }
3651 }
3652 bsi = bsi_last (bb);
3653 if (bsi_end_p (bsi))
3654 continue;
3655
3656 stmt = bsi_stmt (bsi);
3657
3658 err |= verify_eh_edges (stmt);
3659
3660 if (is_ctrl_stmt (stmt))
3661 {
3662 FOR_EACH_EDGE (e, ei, bb->succs)
3663 if (e->flags & EDGE_FALLTHRU)
3664 {
3665 error ("fallthru edge after a control statement in bb %d",
3666 bb->index);
3667 err = 1;
3668 }
3669 }
3670
3671 switch (TREE_CODE (stmt))
3672 {
3673 case COND_EXPR:
3674 {
3675 edge true_edge;
3676 edge false_edge;
3677 if (TREE_CODE (COND_EXPR_THEN (stmt)) != GOTO_EXPR
3678 || TREE_CODE (COND_EXPR_ELSE (stmt)) != GOTO_EXPR)
3679 {
3680 error ("structured COND_EXPR at the end of bb %d", bb->index);
3681 err = 1;
3682 }
3683
3684 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
3685
3686 if (!true_edge || !false_edge
3687 || !(true_edge->flags & EDGE_TRUE_VALUE)
3688 || !(false_edge->flags & EDGE_FALSE_VALUE)
3689 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3690 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
3691 || EDGE_COUNT (bb->succs) >= 3)
3692 {
3693 error ("wrong outgoing edge flags at end of bb %d",
3694 bb->index);
3695 err = 1;
3696 }
3697
3698 if (!has_label_p (true_edge->dest,
3699 GOTO_DESTINATION (COND_EXPR_THEN (stmt))))
3700 {
3701 error ("%<then%> label does not match edge at end of bb %d",
3702 bb->index);
3703 err = 1;
3704 }
3705
3706 if (!has_label_p (false_edge->dest,
3707 GOTO_DESTINATION (COND_EXPR_ELSE (stmt))))
3708 {
3709 error ("%<else%> label does not match edge at end of bb %d",
3710 bb->index);
3711 err = 1;
3712 }
3713 }
3714 break;
3715
3716 case GOTO_EXPR:
3717 if (simple_goto_p (stmt))
3718 {
3719 error ("explicit goto at end of bb %d", bb->index);
3720 err = 1;
3721 }
3722 else
3723 {
3724 /* FIXME. We should double check that the labels in the
3725 destination blocks have their address taken. */
3726 FOR_EACH_EDGE (e, ei, bb->succs)
3727 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
3728 | EDGE_FALSE_VALUE))
3729 || !(e->flags & EDGE_ABNORMAL))
3730 {
3731 error ("wrong outgoing edge flags at end of bb %d",
3732 bb->index);
3733 err = 1;
3734 }
3735 }
3736 break;
3737
3738 case RETURN_EXPR:
3739 if (!single_succ_p (bb)
3740 || (single_succ_edge (bb)->flags
3741 & (EDGE_FALLTHRU | EDGE_ABNORMAL
3742 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3743 {
3744 error ("wrong outgoing edge flags at end of bb %d", bb->index);
3745 err = 1;
3746 }
3747 if (single_succ (bb) != EXIT_BLOCK_PTR)
3748 {
3749 error ("return edge does not point to exit in bb %d",
3750 bb->index);
3751 err = 1;
3752 }
3753 break;
3754
3755 case SWITCH_EXPR:
3756 {
3757 tree prev;
3758 edge e;
3759 size_t i, n;
3760 tree vec;
3761
3762 vec = SWITCH_LABELS (stmt);
3763 n = TREE_VEC_LENGTH (vec);
3764
3765 /* Mark all the destination basic blocks. */
3766 for (i = 0; i < n; ++i)
3767 {
3768 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3769 basic_block label_bb = label_to_block (lab);
3770
3771 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
3772 label_bb->aux = (void *)1;
3773 }
3774
3775 /* Verify that the case labels are sorted. */
3776 prev = TREE_VEC_ELT (vec, 0);
3777 for (i = 1; i < n - 1; ++i)
3778 {
3779 tree c = TREE_VEC_ELT (vec, i);
3780 if (! CASE_LOW (c))
3781 {
3782 error ("found default case not at end of case vector");
3783 err = 1;
3784 continue;
3785 }
3786 if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
3787 {
3788 error ("case labels not sorted:");
3789 print_generic_expr (stderr, prev, 0);
3790 fprintf (stderr," is greater than ");
3791 print_generic_expr (stderr, c, 0);
3792 fprintf (stderr," but comes before it.\n");
3793 err = 1;
3794 }
3795 prev = c;
3796 }
3797 if (CASE_LOW (TREE_VEC_ELT (vec, n - 1)))
3798 {
3799 error ("no default case found at end of case vector");
3800 err = 1;
3801 }
3802
3803 FOR_EACH_EDGE (e, ei, bb->succs)
3804 {
3805 if (!e->dest->aux)
3806 {
3807 error ("extra outgoing edge %d->%d",
3808 bb->index, e->dest->index);
3809 err = 1;
3810 }
3811 e->dest->aux = (void *)2;
3812 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
3813 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
3814 {
3815 error ("wrong outgoing edge flags at end of bb %d",
3816 bb->index);
3817 err = 1;
3818 }
3819 }
3820
3821 /* Check that we have all of them. */
3822 for (i = 0; i < n; ++i)
3823 {
3824 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
3825 basic_block label_bb = label_to_block (lab);
3826
3827 if (label_bb->aux != (void *)2)
3828 {
3829 error ("missing edge %i->%i",
3830 bb->index, label_bb->index);
3831 err = 1;
3832 }
3833 }
3834
3835 FOR_EACH_EDGE (e, ei, bb->succs)
3836 e->dest->aux = (void *)0;
3837 }
3838
3839 default: ;
3840 }
3841 }
3842
3843 if (dom_computed[CDI_DOMINATORS] >= DOM_NO_FAST_QUERY)
3844 verify_dominators (CDI_DOMINATORS);
3845
3846 return err;
3847 }
3848
3849
3850 /* Updates phi nodes after creating a forwarder block joined
3851 by edge FALLTHRU. */
3852
3853 static void
3854 tree_make_forwarder_block (edge fallthru)
3855 {
3856 edge e;
3857 edge_iterator ei;
3858 basic_block dummy, bb;
3859 tree phi, new_phi, var;
3860
3861 dummy = fallthru->src;
3862 bb = fallthru->dest;
3863
3864 if (single_pred_p (bb))
3865 return;
3866
3867 /* If we redirected a branch we must create new phi nodes at the
3868 start of BB. */
3869 for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
3870 {
3871 var = PHI_RESULT (phi);
3872 new_phi = create_phi_node (var, bb);
3873 SSA_NAME_DEF_STMT (var) = new_phi;
3874 SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
3875 add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
3876 }
3877
3878 /* Ensure that the PHI node chain is in the same order. */
3879 set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
3880
3881 /* Add the arguments we have stored on edges. */
3882 FOR_EACH_EDGE (e, ei, bb->preds)
3883 {
3884 if (e == fallthru)
3885 continue;
3886
3887 flush_pending_stmts (e);
3888 }
3889 }
3890
3891
3892 /* Return a non-special label in the head of basic block BB.
3893 Create one if it doesn't exist. */
3894
3895 tree
3896 tree_block_label (basic_block bb)
3897 {
3898 block_stmt_iterator i, s = bsi_start (bb);
3899 bool first = true;
3900 tree label, stmt;
3901
3902 for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
3903 {
3904 stmt = bsi_stmt (i);
3905 if (TREE_CODE (stmt) != LABEL_EXPR)
3906 break;
3907 label = LABEL_EXPR_LABEL (stmt);
3908 if (!DECL_NONLOCAL (label))
3909 {
3910 if (!first)
3911 bsi_move_before (&i, &s);
3912 return label;
3913 }
3914 }
3915
3916 label = create_artificial_label ();
3917 stmt = build1 (LABEL_EXPR, void_type_node, label);
3918 bsi_insert_before (&s, stmt, BSI_NEW_STMT);
3919 return label;
3920 }
3921
3922
3923 /* Attempt to perform edge redirection by replacing a possibly complex
3924 jump instruction by a goto or by removing the jump completely.
3925 This can apply only if all edges now point to the same block. The
3926 parameters and return values are equivalent to
3927 redirect_edge_and_branch. */
3928
3929 static edge
3930 tree_try_redirect_by_replacing_jump (edge e, basic_block target)
3931 {
3932 basic_block src = e->src;
3933 block_stmt_iterator b;
3934 tree stmt;
3935
3936 /* We can replace or remove a complex jump only when we have exactly
3937 two edges. */
3938 if (EDGE_COUNT (src->succs) != 2
3939 /* Verify that all targets will be TARGET. Specifically, the
3940 edge that is not E must also go to TARGET. */
3941 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
3942 return NULL;
3943
3944 b = bsi_last (src);
3945 if (bsi_end_p (b))
3946 return NULL;
3947 stmt = bsi_stmt (b);
3948
3949 if (TREE_CODE (stmt) == COND_EXPR
3950 || TREE_CODE (stmt) == SWITCH_EXPR)
3951 {
3952 bsi_remove (&b);
3953 e = ssa_redirect_edge (e, target);
3954 e->flags = EDGE_FALLTHRU;
3955 return e;
3956 }
3957
3958 return NULL;
3959 }
3960
3961
3962 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
3963 edge representing the redirected branch. */
3964
3965 static edge
3966 tree_redirect_edge_and_branch (edge e, basic_block dest)
3967 {
3968 basic_block bb = e->src;
3969 block_stmt_iterator bsi;
3970 edge ret;
3971 tree label, stmt;
3972
3973 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3974 return NULL;
3975
3976 if (e->src != ENTRY_BLOCK_PTR
3977 && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
3978 return ret;
3979
3980 if (e->dest == dest)
3981 return NULL;
3982
3983 label = tree_block_label (dest);
3984
3985 bsi = bsi_last (bb);
3986 stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
3987
3988 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
3989 {
3990 case COND_EXPR:
3991 stmt = (e->flags & EDGE_TRUE_VALUE
3992 ? COND_EXPR_THEN (stmt)
3993 : COND_EXPR_ELSE (stmt));
3994 GOTO_DESTINATION (stmt) = label;
3995 break;
3996
3997 case GOTO_EXPR:
3998 /* No non-abnormal edges should lead from a non-simple goto, and
3999 simple ones should be represented implicitly. */
4000 gcc_unreachable ();
4001
4002 case SWITCH_EXPR:
4003 {
4004 tree cases = get_cases_for_edge (e, stmt);
4005
4006 /* If we have a list of cases associated with E, then use it
4007 as it's a lot faster than walking the entire case vector. */
4008 if (cases)
4009 {
4010 edge e2 = find_edge (e->src, dest);
4011 tree last, first;
4012
4013 first = cases;
4014 while (cases)
4015 {
4016 last = cases;
4017 CASE_LABEL (cases) = label;
4018 cases = TREE_CHAIN (cases);
4019 }
4020
4021 /* If there was already an edge in the CFG, then we need
4022 to move all the cases associated with E to E2. */
4023 if (e2)
4024 {
4025 tree cases2 = get_cases_for_edge (e2, stmt);
4026
4027 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4028 TREE_CHAIN (cases2) = first;
4029 }
4030 }
4031 else
4032 {
4033 tree vec = SWITCH_LABELS (stmt);
4034 size_t i, n = TREE_VEC_LENGTH (vec);
4035
4036 for (i = 0; i < n; i++)
4037 {
4038 tree elt = TREE_VEC_ELT (vec, i);
4039
4040 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4041 CASE_LABEL (elt) = label;
4042 }
4043 }
4044
4045 break;
4046 }
4047
4048 case RETURN_EXPR:
4049 bsi_remove (&bsi);
4050 e->flags |= EDGE_FALLTHRU;
4051 break;
4052
4053 default:
4054 /* Otherwise it must be a fallthru edge, and we don't need to
4055 do anything besides redirecting it. */
4056 gcc_assert (e->flags & EDGE_FALLTHRU);
4057 break;
4058 }
4059
4060 /* Update/insert PHI nodes as necessary. */
4061
4062 /* Now update the edges in the CFG. */
4063 e = ssa_redirect_edge (e, dest);
4064
4065 return e;
4066 }
4067
4068
4069 /* Simple wrapper, as we can always redirect fallthru edges. */
4070
4071 static basic_block
4072 tree_redirect_edge_and_branch_force (edge e, basic_block dest)
4073 {
4074 e = tree_redirect_edge_and_branch (e, dest);
4075 gcc_assert (e);
4076
4077 return NULL;
4078 }
4079
4080
4081 /* Splits basic block BB after statement STMT (but at least after the
4082 labels). If STMT is NULL, BB is split just after the labels. */
4083
4084 static basic_block
4085 tree_split_block (basic_block bb, void *stmt)
4086 {
4087 block_stmt_iterator bsi, bsi_tgt;
4088 tree act;
4089 basic_block new_bb;
4090 edge e;
4091 edge_iterator ei;
4092
4093 new_bb = create_empty_bb (bb);
4094
4095 /* Redirect the outgoing edges. */
4096 new_bb->succs = bb->succs;
4097 bb->succs = NULL;
4098 FOR_EACH_EDGE (e, ei, new_bb->succs)
4099 e->src = new_bb;
4100
4101 if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
4102 stmt = NULL;
4103
4104 /* Move everything from BSI to the new basic block. */
4105 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4106 {
4107 act = bsi_stmt (bsi);
4108 if (TREE_CODE (act) == LABEL_EXPR)
4109 continue;
4110
4111 if (!stmt)
4112 break;
4113
4114 if (stmt == act)
4115 {
4116 bsi_next (&bsi);
4117 break;
4118 }
4119 }
4120
4121 bsi_tgt = bsi_start (new_bb);
4122 while (!bsi_end_p (bsi))
4123 {
4124 act = bsi_stmt (bsi);
4125 bsi_remove (&bsi);
4126 bsi_insert_after (&bsi_tgt, act, BSI_NEW_STMT);
4127 }
4128
4129 return new_bb;
4130 }
4131
4132
4133 /* Moves basic block BB after block AFTER. */
4134
4135 static bool
4136 tree_move_block_after (basic_block bb, basic_block after)
4137 {
4138 if (bb->prev_bb == after)
4139 return true;
4140
4141 unlink_block (bb);
4142 link_block (bb, after);
4143
4144 return true;
4145 }
4146
4147
4148 /* Return true if basic block BB can be duplicated.  */
4149
4150 static bool
4151 tree_can_duplicate_bb_p (basic_block bb ATTRIBUTE_UNUSED)
4152 {
4153 return true;
4154 }
4155
4156
4157 /* Create a duplicate of the basic block BB. NOTE: This does not
4158 preserve SSA form. */
4159
4160 static basic_block
4161 tree_duplicate_bb (basic_block bb)
4162 {
4163 basic_block new_bb;
4164 block_stmt_iterator bsi, bsi_tgt;
4165 tree phi;
4166
4167 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4168
4169 /* Copy the PHI nodes. We ignore PHI node arguments here because
4170 the incoming edges have not been set up yet.  */
4171 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
4172 {
4173 tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
4174 create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
4175 }
4176
4177 /* Keep the chain of PHI nodes in the same order so that they can be
4178 updated by ssa_redirect_edge. */
4179 set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
4180
4181 bsi_tgt = bsi_start (new_bb);
4182 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
4183 {
4184 def_operand_p def_p;
4185 ssa_op_iter op_iter;
4186 tree stmt, copy;
4187 int region;
4188
4189 stmt = bsi_stmt (bsi);
4190 if (TREE_CODE (stmt) == LABEL_EXPR)
4191 continue;
4192
4193 /* Create a new copy of STMT and duplicate STMT's virtual
4194 operands. */
4195 copy = unshare_expr (stmt);
4196 bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
4197 copy_virtual_operands (copy, stmt);
4198 region = lookup_stmt_eh_region (stmt);
4199 if (region >= 0)
4200 add_stmt_to_eh_region (copy, region);
4201
4202 /* Create new names for all the definitions created by COPY and
4203 add replacement mappings for each new name. */
4204 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
4205 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
4206 }
4207
4208 return new_bb;
4209 }
4210
4211
4212 /* Basic block BB_COPY was created by code duplication. Add phi node
4213 arguments for edges going out of BB_COPY. The blocks that were
4214 duplicated have BB_DUPLICATED set. */
4215
4216 void
4217 add_phi_args_after_copy_bb (basic_block bb_copy)
4218 {
4219 basic_block bb, dest;
4220 edge e, e_copy;
4221 edge_iterator ei;
4222 tree phi, phi_copy, phi_next, def;
4223
4224 bb = get_bb_original (bb_copy);
4225
4226 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
4227 {
4228 if (!phi_nodes (e_copy->dest))
4229 continue;
4230
4231 if (e_copy->dest->flags & BB_DUPLICATED)
4232 dest = get_bb_original (e_copy->dest);
4233 else
4234 dest = e_copy->dest;
4235
4236 e = find_edge (bb, dest);
4237 if (!e)
4238 {
4239 /* During loop unrolling the target of the latch edge is copied.
4240 In this case we are not looking for the edge to DEST, but for
4241 the edge to the duplicated block whose original was DEST.  */
4242 FOR_EACH_EDGE (e, ei, bb->succs)
4243 if ((e->dest->flags & BB_DUPLICATED)
4244 && get_bb_original (e->dest) == dest)
4245 break;
4246
4247 gcc_assert (e != NULL);
4248 }
4249
4250 for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
4251 phi;
4252 phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
4253 {
4254 phi_next = PHI_CHAIN (phi);
4255 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4256 add_phi_arg (phi_copy, def, e_copy);
4257 }
4258 }
4259 }
4260
4261 /* Blocks in REGION_COPY array of length N_REGION were created by
4262 duplication of basic blocks. Add phi node arguments for edges
4263 going from these blocks. */
4264
4265 void
4266 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region)
4267 {
4268 unsigned i;
4269
4270 for (i = 0; i < n_region; i++)
4271 region_copy[i]->flags |= BB_DUPLICATED;
4272
4273 for (i = 0; i < n_region; i++)
4274 add_phi_args_after_copy_bb (region_copy[i]);
4275
4276 for (i = 0; i < n_region; i++)
4277 region_copy[i]->flags &= ~BB_DUPLICATED;
4278 }
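/* Illustrative sketch, with hypothetical REGION, REGION_COPY, N_REGION,
   ENTRY, EXIT and LOOP: once copy_bbs has duplicated the blocks and
   recorded the original/copy mapping, a single call fills in the PHI
   arguments on the edges leaving the copies.  */
#if 0
{
  edge exit_copy;

  initialize_original_copy_tables ();
  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
            split_edge_bb_loc (entry));
  add_phi_args_after_copy (region_copy, n_region);
  free_original_copy_tables ();
}
#endif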
4279
4280 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
4281 important exit edge EXIT. By important we mean that no SSA name defined
4282 inside the region is live across the other exit edges of the region.  All
4283 entry edges to the region must go to ENTRY->dest.  The edge ENTRY is
4284 redirected to the duplicate of the region.  SSA form, dominance and loop
4285 information are updated.  The new basic blocks are stored in REGION_COPY
4286 in the same order as in REGION, provided that REGION_COPY is not NULL.
4287 The function returns false if it is unable to copy the region,
4288 true otherwise. */
4289
4290 bool
4291 tree_duplicate_sese_region (edge entry, edge exit,
4292 basic_block *region, unsigned n_region,
4293 basic_block *region_copy)
4294 {
4295 unsigned i, n_doms;
4296 bool free_region_copy = false, copying_header = false;
4297 struct loop *loop = entry->dest->loop_father;
4298 edge exit_copy;
4299 basic_block *doms;
4300 edge redirected;
4301 int total_freq = 0, entry_freq = 0;
4302 gcov_type total_count = 0, entry_count = 0;
4303
4304 if (!can_copy_bbs_p (region, n_region))
4305 return false;
4306
4307 /* Some sanity checking.  Note that we do not check for all possible
4308 misuses of the function.  That is, if you ask to copy something weird,
4309 it will work, but the state of the data structures will probably
4310 not be correct.  */
4311 for (i = 0; i < n_region; i++)
4312 {
4313 /* We do not handle subloops, i.e. all the blocks must belong to the
4314 same loop. */
4315 if (region[i]->loop_father != loop)
4316 return false;
4317
4318 if (region[i] != entry->dest
4319 && region[i] == loop->header)
4320 return false;
4321 }
4322
4323 loop->copy = loop;
4324
4325 /* In case the function is used for loop header copying (which is the primary
4326 use), ensure that EXIT and its copy will be the new latch and entry edges.  */
4327 if (loop->header == entry->dest)
4328 {
4329 copying_header = true;
4330 loop->copy = loop->outer;
4331
4332 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
4333 return false;
4334
4335 for (i = 0; i < n_region; i++)
4336 if (region[i] != exit->src
4337 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
4338 return false;
4339 }
4340
4341 if (!region_copy)
4342 {
4343 region_copy = xmalloc (sizeof (basic_block) * n_region);
4344 free_region_copy = true;
4345 }
4346
4347 gcc_assert (!need_ssa_update_p ());
4348
4349 /* Record blocks outside the region that are dominated by something
4350 inside. */
4351 doms = xmalloc (sizeof (basic_block) * n_basic_blocks);
4352 initialize_original_copy_tables ();
4353
4354 n_doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region, doms);
4355
4356 if (entry->dest->count)
4357 {
4358 total_count = entry->dest->count;
4359 entry_count = entry->count;
4360 /* Fix up corner cases, to avoid division by zero or creation of negative
4361 frequencies. */
4362 if (entry_count > total_count)
4363 entry_count = total_count;
4364 }
4365 else
4366 {
4367 total_freq = entry->dest->frequency;
4368 entry_freq = EDGE_FREQUENCY (entry);
4369 /* Fix up corner cases, to avoid division by zero or creation of negative
4370 frequencies. */
4371 if (total_freq == 0)
4372 total_freq = 1;
4373 else if (entry_freq > total_freq)
4374 entry_freq = total_freq;
4375 }
4376
4377 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
4378 split_edge_bb_loc (entry));
4379 if (total_count)
4380 {
4381 scale_bbs_frequencies_gcov_type (region, n_region,
4382 total_count - entry_count,
4383 total_count);
4384 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
4385 total_count);
4386 }
4387 else
4388 {
4389 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
4390 total_freq);
4391 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
4392 }
4393
4394 if (copying_header)
4395 {
4396 loop->header = exit->dest;
4397 loop->latch = exit->src;
4398 }
4399
4400 /* Redirect the entry and add the phi node arguments. */
4401 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
4402 gcc_assert (redirected != NULL);
4403 flush_pending_stmts (entry);
4404
4405 /* Concerning updating of dominators: We must recount dominators
4406 for the entry block and its copy.  Anything that is outside of the
4407 region but was dominated by something inside it needs recounting
4408 as well.  */
4409 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
4410 doms[n_doms++] = get_bb_original (entry->dest);
4411 iterate_fix_dominators (CDI_DOMINATORS, doms, n_doms);
4412 free (doms);
4413
4414 /* Add the other PHI node arguments. */
4415 add_phi_args_after_copy (region_copy, n_region);
4416
4417 /* Update the SSA web. */
4418 update_ssa (TODO_update_ssa);
4419
4420 if (free_region_copy)
4421 free (region_copy);
4422
4423 free_original_copy_tables ();
4424 return true;
4425 }
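/* Illustrative sketch of the intended use (e.g. loop header copying):
   ENTRY is the preheader edge of a hypothetical LOOP, EXIT_E an edge
   that leaves the loop from its header, and the region consists of the
   header alone.  LOOP, EXIT_E and the surrounding pass context are
   assumptions, not taken from this file.  */
#if 0
{
  basic_block bbs[1];

  bbs[0] = loop->header;
  if (tree_duplicate_sese_region (loop_preheader_edge (loop), exit_e,
                                  bbs, 1, NULL))
    {
      /* The header was duplicated on the preheader edge and the SSA
         form, dominators and loop structures were updated.  */
    }
}
#endif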
4426
4427
4428 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h).  */
4429
4430 void
4431 dump_function_to_file (tree fn, FILE *file, int flags)
4432 {
4433 tree arg, vars, var;
4434 bool ignore_topmost_bind = false, any_var = false;
4435 basic_block bb;
4436 tree chain;
4437
4438 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
4439
4440 arg = DECL_ARGUMENTS (fn);
4441 while (arg)
4442 {
4443 print_generic_expr (file, arg, dump_flags);
4444 if (TREE_CHAIN (arg))
4445 fprintf (file, ", ");
4446 arg = TREE_CHAIN (arg);
4447 }
4448 fprintf (file, ")\n");
4449
4450 if (flags & TDF_DETAILS)
4451 dump_eh_tree (file, DECL_STRUCT_FUNCTION (fn));
4452 if (flags & TDF_RAW)
4453 {
4454 dump_node (fn, TDF_SLIM | flags, file);
4455 return;
4456 }
4457
4458 /* When GIMPLE is lowered, the variables are no longer available in
4459 BIND_EXPRs, so display them separately. */
4460 if (cfun && cfun->decl == fn && cfun->unexpanded_var_list)
4461 {
4462 ignore_topmost_bind = true;
4463
4464 fprintf (file, "{\n");
4465 for (vars = cfun->unexpanded_var_list; vars; vars = TREE_CHAIN (vars))
4466 {
4467 var = TREE_VALUE (vars);
4468
4469 print_generic_decl (file, var, flags);
4470 fprintf (file, "\n");
4471
4472 any_var = true;
4473 }
4474 }
4475
4476 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
4477 {
4478 /* Make a CFG based dump. */
4479 check_bb_profile (ENTRY_BLOCK_PTR, file);
4480 if (!ignore_topmost_bind)
4481 fprintf (file, "{\n");
4482
4483 if (any_var && n_basic_blocks)
4484 fprintf (file, "\n");
4485
4486 FOR_EACH_BB (bb)
4487 dump_generic_bb (file, bb, 2, flags);
4488
4489 fprintf (file, "}\n");
4490 check_bb_profile (EXIT_BLOCK_PTR, file);
4491 }
4492 else
4493 {
4494 int indent;
4495
4496 /* Make a tree based dump. */
4497 chain = DECL_SAVED_TREE (fn);
4498
4499 if (TREE_CODE (chain) == BIND_EXPR)
4500 {
4501 if (ignore_topmost_bind)
4502 {
4503 chain = BIND_EXPR_BODY (chain);
4504 indent = 2;
4505 }
4506 else
4507 indent = 0;
4508 }
4509 else
4510 {
4511 if (!ignore_topmost_bind)
4512 fprintf (file, "{\n");
4513 indent = 2;
4514 }
4515
4516 if (any_var)
4517 fprintf (file, "\n");
4518
4519 print_generic_stmt_indented (file, chain, flags, indent);
4520 if (ignore_topmost_bind)
4521 fprintf (file, "}\n");
4522 }
4523
4524 fprintf (file, "\n\n");
4525 }
4526
4527
4528 /* Pretty printing of the loop intermediate representation.  */
4529 static void print_loop (FILE *, struct loop *, int);
4530 static void print_pred_bbs (FILE *, basic_block bb);
4531 static void print_succ_bbs (FILE *, basic_block bb);
4532
4533
4534 /* Print on FILE the indexes for the predecessors of basic_block BB. */
4535
4536 static void
4537 print_pred_bbs (FILE *file, basic_block bb)
4538 {
4539 edge e;
4540 edge_iterator ei;
4541
4542 FOR_EACH_EDGE (e, ei, bb->preds)
4543 fprintf (file, "bb_%d ", e->src->index);
4544 }
4545
4546
4547 /* Print on FILE the indexes for the successors of basic_block BB. */
4548
4549 static void
4550 print_succ_bbs (FILE *file, basic_block bb)
4551 {
4552 edge e;
4553 edge_iterator ei;
4554
4555 FOR_EACH_EDGE (e, ei, bb->succs)
4556 fprintf (file, "bb_%d ", e->dest->index);
4557 }
4558
4559
4560 /* Pretty print LOOP on FILE, indented INDENT spaces. */
4561
4562 static void
4563 print_loop (FILE *file, struct loop *loop, int indent)
4564 {
4565 char *s_indent;
4566 basic_block bb;
4567
4568 if (loop == NULL)
4569 return;
4570
4571 s_indent = (char *) alloca ((size_t) indent + 1);
4572 memset ((void *) s_indent, ' ', (size_t) indent);
4573 s_indent[indent] = '\0';
4574
4575 /* Print the loop's header. */
4576 fprintf (file, "%sloop_%d\n", s_indent, loop->num);
4577
4578 /* Print the loop's body. */
4579 fprintf (file, "%s{\n", s_indent);
4580 FOR_EACH_BB (bb)
4581 if (bb->loop_father == loop)
4582 {
4583 /* Print the basic_block's header. */
4584 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
4585 print_pred_bbs (file, bb);
4586 fprintf (file, "}, succs = {");
4587 print_succ_bbs (file, bb);
4588 fprintf (file, "})\n");
4589
4590 /* Print the basic_block's body. */
4591 fprintf (file, "%s {\n", s_indent);
4592 tree_dump_bb (bb, file, indent + 4);
4593 fprintf (file, "%s }\n", s_indent);
4594 }
4595
4596 print_loop (file, loop->inner, indent + 2);
4597 fprintf (file, "%s}\n", s_indent);
4598 print_loop (file, loop->next, indent);
4599 }
4600
4601
4602 /* Starting from basic block 0, pretty print the loop structure of the
4603 current function on FILE.  */
4604
4605 void
4606 print_loop_ir (FILE *file)
4607 {
4608 basic_block bb;
4609
4610 bb = BASIC_BLOCK (0);
4611 if (bb && bb->loop_father)
4612 print_loop (file, bb->loop_father, 0);
4613 }
4614
4615
4616 /* Debugging dump of the loop structure at the tree level.  */
4617
4618 void
4619 debug_loop_ir (void)
4620 {
4621 print_loop_ir (stderr);
4622 }
4623
4624
4625 /* Return true if BB ends with a call, possibly followed by some
4626 instructions that must stay with the call.  Return false
4627 otherwise.  */
4628
4629 static bool
4630 tree_block_ends_with_call_p (basic_block bb)
4631 {
4632 block_stmt_iterator bsi = bsi_last (bb);
4633 return get_call_expr_in (bsi_stmt (bsi)) != NULL;
4634 }
4635
4636
4637 /* Return true if BB ends with a conditional branch.  Return false
4638 otherwise.  */
4639
4640 static bool
4641 tree_block_ends_with_condjump_p (basic_block bb)
4642 {
4643 tree stmt = last_stmt (bb);
4644 return (stmt && TREE_CODE (stmt) == COND_EXPR);
4645 }
4646
4647
4648 /* Return true if we need to add a fake edge to the exit block at
4649 statement T.  Helper function for tree_flow_call_edges_add.  */
4650
4651 static bool
4652 need_fake_edge_p (tree t)
4653 {
4654 tree call;
4655
4656 /* NORETURN and LONGJMP calls already have an edge to exit.
4657 CONST and PURE calls do not need one.
4658 We don't currently check for CONST and PURE here, although
4659 it would be a good idea, because those attributes are
4660 figured out from the RTL in mark_constant_function, and
4661 the counter-increment code emitted by -fprofile-arcs
4662 would give different results from -fbranch-probabilities.  */
4663 call = get_call_expr_in (t);
4664 if (call
4665 && !(call_expr_flags (call) & ECF_NORETURN))
4666 return true;
4667
4668 if (TREE_CODE (t) == ASM_EXPR
4669 && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
4670 return true;
4671
4672 return false;
4673 }
4674
4675
4676 /* Add fake edges to the function exit for any non-constant and
4677 non-noreturn calls, and for volatile inline assembly, in the bitmap of
4678 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.  Return
4679 the number of blocks that were split.
4680
4681 The goal is to expose cases in which entering a basic block does
4682 not imply that all subsequent instructions must be executed. */
4683
4684 static int
4685 tree_flow_call_edges_add (sbitmap blocks)
4686 {
4687 int i;
4688 int blocks_split = 0;
4689 int last_bb = last_basic_block;
4690 bool check_last_block = false;
4691
4692 if (n_basic_blocks == 0)
4693 return 0;
4694
4695 if (! blocks)
4696 check_last_block = true;
4697 else
4698 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4699
4700 /* In the last basic block, before epilogue generation, there will be
4701 a fallthru edge to EXIT.  Special care is required if the last statement
4702 of the last basic block is a call because make_edge folds duplicate
4703 edges, which would result in the fallthru edge also being marked
4704 fake, which would result in the fallthru edge being removed by
4705 remove_fake_edges, which would result in an invalid CFG.
4706
4707 Moreover, we can't elide the outgoing fake edge, since the block
4708 profiler needs to take this into account in order to solve the minimal
4709 spanning tree in the case that the call doesn't return.
4710
4711 Handle this by adding a dummy statement in a new last basic block.  */
4712 if (check_last_block)
4713 {
4714 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4715 block_stmt_iterator bsi = bsi_last (bb);
4716 tree t = NULL_TREE;
4717 if (!bsi_end_p (bsi))
4718 t = bsi_stmt (bsi);
4719
4720 if (need_fake_edge_p (t))
4721 {
4722 edge e;
4723
4724 e = find_edge (bb, EXIT_BLOCK_PTR);
4725 if (e)
4726 {
4727 bsi_insert_on_edge (e, build_empty_stmt ());
4728 bsi_commit_edge_inserts ();
4729 }
4730 }
4731 }
4732
4733 /* Now add fake edges to the function exit for any non-constant
4734 calls, since there is no way for us to determine whether they
4735 will return or not.  */
4736 for (i = 0; i < last_bb; i++)
4737 {
4738 basic_block bb = BASIC_BLOCK (i);
4739 block_stmt_iterator bsi;
4740 tree stmt, last_stmt;
4741
4742 if (!bb)
4743 continue;
4744
4745 if (blocks && !TEST_BIT (blocks, i))
4746 continue;
4747
4748 bsi = bsi_last (bb);
4749 if (!bsi_end_p (bsi))
4750 {
4751 last_stmt = bsi_stmt (bsi);
4752 do
4753 {
4754 stmt = bsi_stmt (bsi);
4755 if (need_fake_edge_p (stmt))
4756 {
4757 edge e;
4758 /* The handling above of the final block before the
4759 epilogue should be enough to verify that there is
4760 no edge to the exit block in CFG already.
4761 Calling make_edge in such case would cause us to
4762 mark that edge as fake and remove it later. */
4763 #ifdef ENABLE_CHECKING
4764 if (stmt == last_stmt)
4765 {
4766 e = find_edge (bb, EXIT_BLOCK_PTR);
4767 gcc_assert (e == NULL);
4768 }
4769 #endif
4770
4771 /* Note that the following may create a new basic block
4772 and renumber the existing basic blocks. */
4773 if (stmt != last_stmt)
4774 {
4775 e = split_block (bb, stmt);
4776 if (e)
4777 blocks_split++;
4778 }
4779 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4780 }
4781 bsi_prev (&bsi);
4782 }
4783 while (!bsi_end_p (bsi));
4784 }
4785 }
4786
4787 if (blocks_split)
4788 verify_flow_info ();
4789
4790 return blocks_split;
4791 }
4792
4793 bool
4794 tree_purge_dead_eh_edges (basic_block bb)
4795 {
4796 bool changed = false;
4797 edge e;
4798 edge_iterator ei;
4799 tree stmt = last_stmt (bb);
4800
4801 if (stmt && tree_can_throw_internal (stmt))
4802 return false;
4803
4804 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4805 {
4806 if (e->flags & EDGE_EH)
4807 {
4808 remove_edge (e);
4809 changed = true;
4810 }
4811 else
4812 ei_next (&ei);
4813 }
4814
4815 /* Removal of dead EH edges might change the dominators of blocks
4816 other than the immediate successors.  E.g. when bb1 is changed so
4817 that it can no longer throw, and bb1->bb3 and bb1->bb4 are the dead
4818 EH edges purged by this function in:
4819 0
4820 / \
4821 v v
4822 1-->2
4823 / \ |
4824 v v |
4825 3-->4 |
4826 \ v
4827 --->5
4828 |
4829 -
4830 idom(bb5) must be recomputed. For now just free the dominance
4831 info. */
4832 if (changed)
4833 free_dominance_info (CDI_DOMINATORS);
4834
4835 return changed;
4836 }
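/* Illustrative sketch: a pass that has just simplified the last statement
   of a hypothetical block BB so that it can no longer throw internally
   can drop the now-dead EH edges like this.  */
#if 0
{
  if (tree_purge_dead_eh_edges (bb))
    {
      /* Edges were removed; dominance info has been freed by the callee
         and must be recomputed before it is used again.  */
    }
}
#endif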
4837
4838 bool
4839 tree_purge_all_dead_eh_edges (bitmap blocks)
4840 {
4841 bool changed = false;
4842 unsigned i;
4843 bitmap_iterator bi;
4844
4845 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
4846 {
4847 changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
4848 }
4849
4850 return changed;
4851 }
4852
4853 /* This function is called whenever a new edge is created or
4854 redirected. */
4855
4856 static void
4857 tree_execute_on_growing_pred (edge e)
4858 {
4859 basic_block bb = e->dest;
4860
4861 if (phi_nodes (bb))
4862 reserve_phi_args_for_new_edge (bb);
4863 }
4864
4865 /* This function is called immediately before edge E is removed from
4866 the edge vector E->dest->preds. */
4867
4868 static void
4869 tree_execute_on_shrinking_pred (edge e)
4870 {
4871 if (phi_nodes (e->dest))
4872 remove_phi_args (e);
4873 }
4874
4875 /*---------------------------------------------------------------------------
4876 Helper functions for Loop versioning
4877 ---------------------------------------------------------------------------*/
4878
4879 /* Adjust PHI nodes for basic block FIRST.  SECOND is a copy of FIRST,
4880 and both are dominated by basic block NEW_HEAD.  NEW_HEAD was created
4881 by splitting SECOND's incoming edge, and the NEW_HEAD->SECOND edge
4882 received PHI arguments from split_edge().  Later, an additional edge E
4883 was created to connect NEW_HEAD and FIRST.  This routine now adds, on
4884 edge E, the same PHI arguments that the NEW_HEAD->SECOND edge received
4885 as part of the edge splitting.
4886 */
4887
4888 static void
4889 tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
4890 basic_block new_head, edge e)
4891 {
4892 tree phi1, phi2;
4893 edge e2 = find_edge (new_head, second);
4894
4895 /* Because NEW_HEAD has been created by splitting SECOND's incoming
4896 edge, we should always have an edge from NEW_HEAD to SECOND. */
4897 gcc_assert (e2 != NULL);
4898
4899 /* Browse all 'second' basic block phi nodes and add phi args to
4900 edge 'e' for 'first' head. PHI args are always in correct order. */
4901
4902 for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
4903 phi2 && phi1;
4904 phi2 = PHI_CHAIN (phi2), phi1 = PHI_CHAIN (phi1))
4905 {
4906 tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
4907 add_phi_arg (phi1, def, e);
4908 }
4909 }
4910
4911 /* Add an if-else statement to COND_BB with condition COND_E.
4912 FIRST_HEAD is the destination of the THEN part and SECOND_HEAD is
4913 the destination of the ELSE part, matching the COND_EXPR built below.  */
4914 static void
4915 tree_lv_add_condition_to_bb (basic_block first_head, basic_block second_head,
4916 basic_block cond_bb, void *cond_e)
4917 {
4918 block_stmt_iterator bsi;
4919 tree goto1 = NULL_TREE;
4920 tree goto2 = NULL_TREE;
4921 tree new_cond_expr = NULL_TREE;
4922 tree cond_expr = (tree) cond_e;
4923 edge e0;
4924
4925 /* Build new conditional expr */
4926 goto1 = build1 (GOTO_EXPR, void_type_node, tree_block_label (first_head));
4927 goto2 = build1 (GOTO_EXPR, void_type_node, tree_block_label (second_head));
4928 new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr, goto1, goto2);
4929
4930 /* Add new cond in cond_bb. */
4931 bsi = bsi_start (cond_bb);
4932 bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
4933 /* Adjust edges appropriately to connect new head with first head
4934 as well as second head. */
4935 e0 = single_succ_edge (cond_bb);
4936 e0->flags &= ~EDGE_FALLTHRU;
4937 e0->flags |= EDGE_FALSE_VALUE;
4938 }
4939
4940 struct cfg_hooks tree_cfg_hooks = {
4941 "tree",
4942 tree_verify_flow_info,
4943 tree_dump_bb, /* dump_bb */
4944 create_bb, /* create_basic_block */
4945 tree_redirect_edge_and_branch,/* redirect_edge_and_branch */
4946 tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force */
4947 remove_bb, /* delete_basic_block */
4948 tree_split_block, /* split_block */
4949 tree_move_block_after, /* move_block_after */
4950 tree_can_merge_blocks_p, /* can_merge_blocks_p */
4951 tree_merge_blocks, /* merge_blocks */
4952 tree_predict_edge, /* predict_edge */
4953 tree_predicted_by_p, /* predicted_by_p */
4954 tree_can_duplicate_bb_p, /* can_duplicate_block_p */
4955 tree_duplicate_bb, /* duplicate_block */
4956 tree_split_edge, /* split_edge */
4957 tree_make_forwarder_block, /* make_forward_block */
4958 NULL, /* tidy_fallthru_edge */
4959 tree_block_ends_with_call_p, /* block_ends_with_call_p */
4960 tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4961 tree_flow_call_edges_add, /* flow_call_edges_add */
4962 tree_execute_on_growing_pred, /* execute_on_growing_pred */
4963 tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
4964 tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
4965 tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4966 tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
4967 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
4968 flush_pending_stmts /* flush_pending_stmts */
4969 };
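/* A minimal usage sketch: a GIMPLE pass installs this table through
   tree_register_cfg_hooks (cfghooks.c) and then manipulates the CFG
   through the IR-independent wrappers, which dispatch to the entries
   above.  BB, STMT and DEST are hypothetical.  */
#if 0
{
  edge e;

  tree_register_cfg_hooks ();
  e = split_block (bb, stmt);		/* -> tree_split_block */
  redirect_edge_and_branch (e, dest);	/* -> tree_redirect_edge_and_branch */
}
#endif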
4970
4971
4972 /* Split all critical edges. */
4973
4974 static void
4975 split_critical_edges (void)
4976 {
4977 basic_block bb;
4978 edge e;
4979 edge_iterator ei;
4980
4981 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
4982 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
4983 mappings around the calls to split_edge. */
4984 start_recording_case_labels ();
4985 FOR_ALL_BB (bb)
4986 {
4987 FOR_EACH_EDGE (e, ei, bb->succs)
4988 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
4989 {
4990 split_edge (e);
4991 }
4992 }
4993 end_recording_case_labels ();
4994 }
4995
4996 struct tree_opt_pass pass_split_crit_edges =
4997 {
4998 "crited", /* name */
4999 NULL, /* gate */
5000 split_critical_edges, /* execute */
5001 NULL, /* sub */
5002 NULL, /* next */
5003 0, /* static_pass_number */
5004 TV_TREE_SPLIT_EDGES, /* tv_id */
5005 PROP_cfg, /* properties required */
5006 PROP_no_crit_edges, /* properties_provided */
5007 0, /* properties_destroyed */
5008 0, /* todo_flags_start */
5009 TODO_dump_func, /* todo_flags_finish */
5010 0 /* letter */
5011 };
5012
5013 \f
5014 /* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
5015 a temporary, making sure it is registered to be renamed if necessary,
5016 and finally return the temporary. Put the statements to compute
5017 EXP before the current statement in BSI. */
5018
5019 tree
5020 gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
5021 {
5022 tree t, new_stmt, orig_stmt;
5023
5024 if (is_gimple_val (exp))
5025 return exp;
5026
5027 t = make_rename_temp (type, NULL);
5028 new_stmt = build (MODIFY_EXPR, type, t, exp);
5029
5030 orig_stmt = bsi_stmt (*bsi);
5031 SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
5032 TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
5033
5034 bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
5035
5036 return t;
5037 }
5038
5039 /* Build a ternary operation and gimplify it. Emit code before BSI.
5040 Return the gimple_val holding the result. */
5041
5042 tree
5043 gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
5044 tree type, tree a, tree b, tree c)
5045 {
5046 tree ret;
5047
5048 ret = fold_build3 (code, type, a, b, c);
5049 STRIP_NOPS (ret);
5050
5051 return gimplify_val (bsi, type, ret);
5052 }
5053
5054 /* Build a binary operation and gimplify it. Emit code before BSI.
5055 Return the gimple_val holding the result. */
5056
5057 tree
5058 gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
5059 tree type, tree a, tree b)
5060 {
5061 tree ret;
5062
5063 ret = fold_build2 (code, type, a, b);
5064 STRIP_NOPS (ret);
5065
5066 return gimplify_val (bsi, type, ret);
5067 }
5068
5069 /* Build a unary operation and gimplify it. Emit code before BSI.
5070 Return the gimple_val holding the result. */
5071
5072 tree
5073 gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
5074 tree a)
5075 {
5076 tree ret;
5077
5078 ret = fold_build1 (code, type, a);
5079 STRIP_NOPS (ret);
5080
5081 return gimplify_val (bsi, type, ret);
5082 }
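/* Illustrative sketch: the helpers above can be chained to materialize a
   compound expression as GIMPLE temporaries in front of the statement at
   *BSI.  A, B and BSI are hypothetical; the result of each helper is a
   valid GIMPLE value.  */
#if 0
{
  tree prod = gimplify_build2 (bsi, MULT_EXPR, TREE_TYPE (a), a, b);
  tree neg = gimplify_build1 (bsi, NEGATE_EXPR, TREE_TYPE (a), prod);
  /* NEG now names the value -(A * B) and may be used by statements
     inserted after *BSI.  */
}
#endif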
5083
5084
5085 \f
5086 /* Emit return warnings. */
5087
5088 static void
5089 execute_warn_function_return (void)
5090 {
5091 #ifdef USE_MAPPED_LOCATION
5092 source_location location;
5093 #else
5094 location_t *locus;
5095 #endif
5096 tree last;
5097 edge e;
5098 edge_iterator ei;
5099
5100 /* If we have a path to EXIT, then we do return. */
5101 if (TREE_THIS_VOLATILE (cfun->decl)
5102 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
5103 {
5104 #ifdef USE_MAPPED_LOCATION
5105 location = UNKNOWN_LOCATION;
5106 #else
5107 locus = NULL;
5108 #endif
5109 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5110 {
5111 last = last_stmt (e->src);
5112 if (TREE_CODE (last) == RETURN_EXPR
5113 #ifdef USE_MAPPED_LOCATION
5114 && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
5115 #else
5116 && (locus = EXPR_LOCUS (last)) != NULL)
5117 #endif
5118 break;
5119 }
5120 #ifdef USE_MAPPED_LOCATION
5121 if (location == UNKNOWN_LOCATION)
5122 location = cfun->function_end_locus;
5123 warning (0, "%H%<noreturn%> function does return", &location);
5124 #else
5125 if (!locus)
5126 locus = &cfun->function_end_locus;
5127 warning (0, "%H%<noreturn%> function does return", locus);
5128 #endif
5129 }
5130
5131 /* If we see "return;" in some basic block, then we do reach the end
5132 without returning a value. */
5133 else if (warn_return_type
5134 && !TREE_NO_WARNING (cfun->decl)
5135 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
5136 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
5137 {
5138 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5139 {
5140 tree last = last_stmt (e->src);
5141 if (TREE_CODE (last) == RETURN_EXPR
5142 && TREE_OPERAND (last, 0) == NULL
5143 && !TREE_NO_WARNING (last))
5144 {
5145 #ifdef USE_MAPPED_LOCATION
5146 location = EXPR_LOCATION (last);
5147 if (location == UNKNOWN_LOCATION)
5148 location = cfun->function_end_locus;
5149 warning (0, "%Hcontrol reaches end of non-void function", &location);
5150 #else
5151 locus = EXPR_LOCUS (last);
5152 if (!locus)
5153 locus = &cfun->function_end_locus;
5154 warning (0, "%Hcontrol reaches end of non-void function", locus);
5155 #endif
5156 TREE_NO_WARNING (cfun->decl) = 1;
5157 break;
5158 }
5159 }
5160 }
5161 }
5162
5163
5164 /* Given a basic block B which ends with a conditional and has
5165 precisely two successors, determine which of the edges is taken if
5166 the conditional is true and which is taken if the conditional is
5167 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
5168
5169 void
5170 extract_true_false_edges_from_block (basic_block b,
5171 edge *true_edge,
5172 edge *false_edge)
5173 {
5174 edge e = EDGE_SUCC (b, 0);
5175
5176 if (e->flags & EDGE_TRUE_VALUE)
5177 {
5178 *true_edge = e;
5179 *false_edge = EDGE_SUCC (b, 1);
5180 }
5181 else
5182 {
5183 *false_edge = e;
5184 *true_edge = EDGE_SUCC (b, 1);
5185 }
5186 }
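/* Illustrative sketch: a typical caller checks that a hypothetical block
   BB really ends in a COND_EXPR before asking for its outgoing edges.  */
#if 0
{
  edge true_edge, false_edge;
  tree last = last_stmt (bb);

  if (last && TREE_CODE (last) == COND_EXPR)
    {
      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
      /* TRUE_EDGE->dest is reached when the condition evaluates to true.  */
    }
}
#endif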
5187
5188 struct tree_opt_pass pass_warn_function_return =
5189 {
5190 NULL, /* name */
5191 NULL, /* gate */
5192 execute_warn_function_return, /* execute */
5193 NULL, /* sub */
5194 NULL, /* next */
5195 0, /* static_pass_number */
5196 0, /* tv_id */
5197 PROP_cfg, /* properties_required */
5198 0, /* properties_provided */
5199 0, /* properties_destroyed */
5200 0, /* todo_flags_start */
5201 0, /* todo_flags_finish */
5202 0 /* letter */
5203 };
5204
5205 /* Emit noreturn warnings. */
5206
5207 static void
5208 execute_warn_function_noreturn (void)
5209 {
5210 if (warn_missing_noreturn
5211 && !TREE_THIS_VOLATILE (cfun->decl)
5212 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
5213 && !lang_hooks.function.missing_noreturn_ok_p (cfun->decl))
5214 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
5215 "for attribute %<noreturn%>",
5216 cfun->decl);
5217 }
5218
5219 struct tree_opt_pass pass_warn_function_noreturn =
5220 {
5221 NULL, /* name */
5222 NULL, /* gate */
5223 execute_warn_function_noreturn, /* execute */
5224 NULL, /* sub */
5225 NULL, /* next */
5226 0, /* static_pass_number */
5227 0, /* tv_id */
5228 PROP_cfg, /* properties_required */
5229 0, /* properties_provided */
5230 0, /* properties_destroyed */
5231 0, /* todo_flags_start */
5232 0, /* todo_flags_finish */
5233 0 /* letter */
5234 };