/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                             Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  make_edges ();
  cleanup_dead_labels ();

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */
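
/* For example, a computed goto such as

        goto *p;

   is rewritten as

        gotovar = p;
        goto <factored-label>;

   where <factored-label> stands for the artificial label created
   below, so that the single factored block

        <factored-label>:
        goto *gotovar;

   is the only computed jump left in the function.  */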

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make abnormal goto use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
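
          /* For example, given a call FOO that can transfer control
             abnormally, an assignment

               x = foo ();

             is rewritten below as

               tmp = foo ();
               x = tmp;

             so the old value of X stays available on the abnormal
             edge.  (FOO, X and TMP are illustrative names.)  */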
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     ggc_alloc_cleared to allocate a basic block, we do not have to
     clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */
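
/* For example, a condition written as  if (0 != 0)  folds to a false
   constant, and gimple_cond_make_false below canonicalizes the
   GIMPLE_COND so that later CFG cleanup can remove the dead branch.  */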

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                              gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* Some calls are known not to return.  */
              fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                {
                  make_eh_edges (last);
                }
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;


            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;


            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}


/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if ((errorcount || sorrycount) && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}


/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Callback for for_each_eh_region.  Helper for cleanup_dead_labels.  */
static void
update_eh_label (struct eh_region *region)
{
  tree old_label = get_eh_region_tree_label (region);
  if (old_label)
    {
      tree new_label;
      basic_block bb = label_to_block (old_label);

      /* ??? After optimizing, there may be EH regions with labels
         that have already been removed from the function body, so
         there is no basic block for them.  */
      if (! bb)
        return;

      new_label = label_for_bb[bb->index].label;
      label_for_bb[bb->index].used = true;
      set_eh_region_tree_label (region, new_label);
    }
}


/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */
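
/* For example, if an artificial label starts the same block as a
   user-defined label L1, L1 becomes the block's leading label, any
   jump to the artificial label is redirected to L1, and the
   now-unreferenced artificial label is purged.  */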

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
              break;
            }

        default:
          break;
        }
    }

  for_each_eh_region (update_eh_label);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Look for blocks ending in a multiway branch (a SWITCH_EXPR in GIMPLE),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.
   E.g., three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
          int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
          bool has_default;

          /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
             away.  */
          if (!CASE_LOW (gimple_switch_default_label (stmt))
              && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
              default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
              has_default = true;
            }
          else
            has_default = false;

          /* Look for possible opportunities to merge cases.  */
          if (has_default)
            i = 1;
          else
            i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
              base_case = gimple_switch_label (stmt, i);

              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);

              /* Discard cases that have the same destination as the
                 default case.  */
              if (base_label == default_label)
                {
                  gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }

              base_high = CASE_HIGH (base_case)
                          ? CASE_HIGH (base_case)
                          : CASE_LOW (base_case);
              i++;

              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
                  tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);

                  /* Merge the cases if they jump to the same place,
                     and their ranges are consecutive.  */
                  if (merge_label == base_label
                      && tree_int_cst_equal (CASE_LOW (merge_case), t))
                    {
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
                      gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
                  else
                    break;
                }
            }

          /* Compress the case labels in the label vector, and adjust the
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
              while (! gimple_switch_label (stmt, j))
                j++;
              gimple_switch_set_label (stmt, i,
                                       gimple_switch_label (stmt, j++));
            }

          gcc_assert (new_size <= old_size);
          gimple_switch_set_num_labels (stmt, new_size);
        }
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_ABNORMAL)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date, we cannot eliminate any phis; however, if only
     some symbols as whole are marked for renaming, this is not a problem,
     as phi nodes for those symbols are irrelevant in updating anyway.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis))
    {
      gimple_stmt_iterator i;

      if (name_mappings_registered_p ())
        return false;

      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
        {
          gimple phi = gsi_stmt (i);

          if (!is_gimple_reg (gimple_phi_result (phi))
              && !may_propagate_copy (gimple_phi_result (phi),
                                      gimple_phi_arg_def (phi, 0)))
            return false;
        }
    }

  /* Do not remove user labels.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops
      && b->loop_father->latch == b)
    return false;

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      if (gimple_code (stmt) != GIMPLE_PHI)
        push_stmt_changes (&stmt);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in pop_stmt_changes.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);

          pop_stmt_changes (&stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
          gimple label = gsi_stmt (gsi);

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (gimple_label_label (label)))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
      else
        {
          gimple_set_bb (gsi_stmt (gsi), a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of two successors of BB that is not reached by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}


/* Walk the function tree removing unnecessary statements.

     * Empty statement nodes are removed

     * Unnecessary TRY_FINALLY and TRY_CATCH blocks are removed

     * Unnecessary COND_EXPRs are removed

     * Some unnecessary BIND_EXPRs are removed

     * GOTO_EXPRs immediately preceding destination are removed.

   Clearly more work could be done.  The trick is doing the analysis
   and removal fast enough to be a net improvement in compile times.

   Note that when we remove a control structure such as a COND_EXPR,
   BIND_EXPR, or TRY block, we will need to repeat this optimization pass
   to ensure we eliminate all the useless code.  */
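
/* For example, the pair

     goto L1;
     L1:

   leaves a goto that immediately precedes its destination; the goto
   is replaced with a GIMPLE_NOP, which a later iteration of this
   pass then removes.  */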

struct rus_data
{
  bool repeat;
  bool may_throw;
  bool may_branch;
  bool has_label;
  bool last_was_goto;
  gimple_stmt_iterator last_goto_gsi;
};


static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);

/* Given a statement sequence, find the first executable statement with
   location information, and warn that it is unreachable.  When searching,
   descend into containers in execution order.  */

static bool
remove_useless_stmts_warn_notreached (gimple_seq stmts)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_has_location (stmt))
        {
          location_t loc = gimple_location (stmt);
          if (LOCATION_LINE (loc) > 0)
            {
              warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
              return true;
            }
        }

      switch (gimple_code (stmt))
        {
        /* Unfortunately, we need the CFG now to detect unreachable
           branches in a conditional, so conditionals are not handled here.  */

        case GIMPLE_TRY:
          if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
            return true;
          if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
            return true;
          break;

        case GIMPLE_CATCH:
          return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));

        case GIMPLE_EH_FILTER:
          return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));

        case GIMPLE_BIND:
          return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));

        default:
          break;
        }
    }

  return false;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_COND statements.  */

static void
remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  /* The folded result must still be a conditional statement.  */
  fold_stmt_inplace (stmt);

  data->may_branch = true;

  /* Replace trivial conditionals with gotos.  */
  if (gimple_cond_true_p (stmt))
    {
      /* Goto THEN label.  */
      tree then_label = gimple_cond_true_label (stmt);

      gsi_replace (gsi, gimple_build_goto (then_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else if (gimple_cond_false_p (stmt))
    {
      /* Goto ELSE label.  */
      tree else_label = gimple_cond_false_label (stmt);

      gsi_replace (gsi, gimple_build_goto (else_label), false);
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
      data->repeat = true;
    }
  else
    {
      tree then_label = gimple_cond_true_label (stmt);
      tree else_label = gimple_cond_false_label (stmt);

      if (then_label == else_label)
        {
          /* Goto common destination.  */
          gsi_replace (gsi, gimple_build_goto (then_label), false);
          data->last_goto_gsi = *gsi;
          data->last_was_goto = true;
          data->repeat = true;
        }
    }

  gsi_next (gsi);

  data->last_was_goto = false;
}

/* Helper for remove_useless_stmts_1.
   Handle the try-finally case for GIMPLE_TRY statements.  */

static void
remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_branch, save_may_throw;
  bool this_may_branch, this_may_throw;

  gimple_seq eval_seq, cleanup_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_branch and may_throw information for the body only.  */
  save_may_branch = data->may_branch;
  save_may_throw = data->may_throw;
  data->may_branch = false;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_branch = data->may_branch;
  this_may_throw = data->may_throw;
  data->may_branch |= save_may_branch;
  data->may_throw |= save_may_throw;
  data->last_was_goto = false;

  cleanup_seq = gimple_try_cleanup (stmt);
  cleanup_gsi = gsi_start (cleanup_seq);
  remove_useless_stmts_1 (&cleanup_gsi, data);

  /* If the body is empty, then we can emit the FINALLY block without
     the enclosing TRY_FINALLY_EXPR.  */
  if (gimple_seq_empty_p (eval_seq))
    {
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the handler is empty, then we can emit the TRY block without
     the enclosing TRY_FINALLY_EXPR.  */
  else if (gimple_seq_empty_p (cleanup_seq))
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }

  /* If the body neither throws, nor branches, then we can safely
     string the TRY and FINALLY blocks together.  */
  else if (!this_may_branch && !this_may_throw)
    {
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.
   Handle the try-catch case for GIMPLE_TRY statements.  */

static void
remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  bool save_may_throw, this_may_throw;

  gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
  gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* Collect may_throw information for the body only.  */
  save_may_throw = data->may_throw;
  data->may_throw = false;
  data->last_was_goto = false;

  eval_seq = gimple_try_eval (stmt);
  eval_gsi = gsi_start (eval_seq);
  remove_useless_stmts_1 (&eval_gsi, data);

  this_may_throw = data->may_throw;
  data->may_throw = save_may_throw;

  cleanup_seq = gimple_try_cleanup (stmt);

  /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
  if (!this_may_throw)
    {
      if (warn_notreached)
        {
          remove_useless_stmts_warn_notreached (cleanup_seq);
        }
      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
      gsi_remove (gsi, false);
      data->repeat = true;
      return;
    }

  /* Process the catch clause specially.  We may be able to tell that
     no exceptions propagate past this point.  */

  this_may_throw = true;
  cleanup_gsi = gsi_start (cleanup_seq);
  stmt = gsi_stmt (cleanup_gsi);
  data->last_was_goto = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_CATCH:
      /* If the first element is a catch, they all must be.  */
      while (!gsi_end_p (cleanup_gsi))
        {
          stmt = gsi_stmt (cleanup_gsi);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
          if (gimple_catch_types (stmt) == NULL)
            this_may_throw = false;
          data->last_was_goto = false;
          handler_seq = gimple_catch_handler (stmt);
          handler_gsi = gsi_start (handler_seq);
          remove_useless_stmts_1 (&handler_gsi, data);
          gsi_next (&cleanup_gsi);
        }
      gsi_next (gsi);
      break;

    case GIMPLE_EH_FILTER:
      /* If the first element is an eh_filter, it should stand alone.  */
      if (gimple_eh_filter_must_not_throw (stmt))
        this_may_throw = false;
      else if (gimple_eh_filter_types (stmt) == NULL)
        this_may_throw = false;
      failure_seq = gimple_eh_filter_failure (stmt);
      failure_gsi = gsi_start (failure_seq);
      remove_useless_stmts_1 (&failure_gsi, data);
      gsi_next (gsi);
      break;

    default:
      /* Otherwise this is a list of cleanup statements.  */
      remove_useless_stmts_1 (&cleanup_gsi, data);

      /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
      if (gimple_seq_empty_p (cleanup_seq))
        {
          gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
      else
        gsi_next (gsi);
      break;
    }

  data->may_throw |= this_may_throw;
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_BIND statements.  */

static void
remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data ATTRIBUTE_UNUSED)
{
  tree block;
  gimple_seq body_seq, fn_body_seq;
  gimple_stmt_iterator body_gsi;

  gimple stmt = gsi_stmt (*gsi);

  /* First remove anything underneath the BIND_EXPR.  */

  body_seq = gimple_bind_body (stmt);
  body_gsi = gsi_start (body_seq);
  remove_useless_stmts_1 (&body_gsi, data);

  /* If the GIMPLE_BIND has no variables, then we can pull everything
     up one level and remove the GIMPLE_BIND, unless this is the toplevel
     GIMPLE_BIND for the current function or an inlined function.

     When this situation occurs we will want to apply this
     optimization again.  */
  block = gimple_bind_block (stmt);
  fn_body_seq = gimple_body (current_function_decl);
  if (gimple_bind_vars (stmt) == NULL_TREE
      && (gimple_seq_empty_p (fn_body_seq)
          || stmt != gimple_seq_first_stmt (fn_body_seq))
      && (! block
          || ! BLOCK_ABSTRACT_ORIGIN (block)
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
              != FUNCTION_DECL)))
    {
      tree var = NULL_TREE;
      /* Even if there are no gimple_bind_vars, there might be other
         decls in BLOCK_VARS rendering the GIMPLE_BIND not useless.  */
      if (block && !BLOCK_NUM_NONLOCALIZED_VARS (block))
        for (var = BLOCK_VARS (block); var; var = TREE_CHAIN (var))
          if (TREE_CODE (var) == IMPORTED_DECL)
            break;
      if (var || (block && BLOCK_NUM_NONLOCALIZED_VARS (block)))
        gsi_next (gsi);
      else
        {
          gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
          gsi_remove (gsi, false);
          data->repeat = true;
        }
    }
  else
    gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_GOTO statements.  */

static void
remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree dest = gimple_goto_dest (stmt);

  data->may_branch = true;
  data->last_was_goto = false;

  /* Record iterator for last goto expr, so that we can delete it
     if unnecessary.  */
  if (TREE_CODE (dest) == LABEL_DECL)
    {
      data->last_goto_gsi = *gsi;
      data->last_was_goto = true;
    }

  gsi_next (gsi);
}

/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.  */

static void
remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  tree label = gimple_label_label (stmt);

  data->has_label = true;

  /* We do want to jump across non-local label receiver code.  */
  if (DECL_NONLOCAL (label))
    data->last_was_goto = false;

  else if (data->last_was_goto
           && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
    {
      /* Replace the preceding GIMPLE_GOTO statement with
         a GIMPLE_NOP, which will be subsequently removed.
         In this way, we avoid invalidating other iterators
         active on the statement sequence.  */
      gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
      data->last_was_goto = false;
      data->repeat = true;
    }

  /* ??? Add something here to delete unused labels.  */

  gsi_next (gsi);
}


/* CALL is a CALL_EXPR statement.  Set the current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

1920 /* Remove useless statements from a statement sequence, and perform
1921 some preliminary simplifications. */
1922
1923 static void
1924 remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
1925 {
1926 while (!gsi_end_p (*gsi))
1927 {
1928 gimple stmt = gsi_stmt (*gsi);
1929
1930 switch (gimple_code (stmt))
1931 {
1932 case GIMPLE_COND:
1933 remove_useless_stmts_cond (gsi, data);
1934 break;
1935
1936 case GIMPLE_GOTO:
1937 remove_useless_stmts_goto (gsi, data);
1938 break;
1939
1940 case GIMPLE_LABEL:
1941 remove_useless_stmts_label (gsi, data);
1942 break;
1943
1944 case GIMPLE_ASSIGN:
1945 fold_stmt (gsi);
1946 stmt = gsi_stmt (*gsi);
1947 data->last_was_goto = false;
1948 if (stmt_could_throw_p (stmt))
1949 data->may_throw = true;
1950 gsi_next (gsi);
1951 break;
1952
1953 case GIMPLE_ASM:
1954 fold_stmt (gsi);
1955 data->last_was_goto = false;
1956 gsi_next (gsi);
1957 break;
1958
1959 case GIMPLE_CALL:
1960 fold_stmt (gsi);
1961 stmt = gsi_stmt (*gsi);
1962 data->last_was_goto = false;
1963 if (is_gimple_call (stmt))
1964 notice_special_calls (stmt);
1965
1966 /* We used to call update_gimple_call_flags here,
1967 which copied side-effects and nothrows status
1968 from the function decl to the call. In the new
1969 tuplified GIMPLE, the accessors for this information
1970 always consult the function decl, so this copying
1971 is no longer necessary. */
1972 if (stmt_could_throw_p (stmt))
1973 data->may_throw = true;
1974 gsi_next (gsi);
1975 break;
1976
1977 case GIMPLE_RETURN:
1978 fold_stmt (gsi);
1979 data->last_was_goto = false;
1980 data->may_branch = true;
1981 gsi_next (gsi);
1982 break;
1983
1984 case GIMPLE_BIND:
1985 remove_useless_stmts_bind (gsi, data);
1986 break;
1987
1988 case GIMPLE_TRY:
1989 if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
1990 remove_useless_stmts_tc (gsi, data);
1991 else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1992 remove_useless_stmts_tf (gsi, data);
1993 else
1994 gcc_unreachable ();
1995 break;
1996
1997 case GIMPLE_CATCH:
1998 gcc_unreachable ();
1999 break;
2000
2001 case GIMPLE_NOP:
2002 gsi_remove (gsi, false);
2003 break;
2004
2005 case GIMPLE_OMP_FOR:
2006 {
2007 gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
2008 gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
2009
2010 remove_useless_stmts_1 (&pre_body_gsi, data);
2011 data->last_was_goto = false;
2012 }
2013 /* FALLTHROUGH */
2014 case GIMPLE_OMP_CRITICAL:
2015 case GIMPLE_OMP_CONTINUE:
2016 case GIMPLE_OMP_MASTER:
2017 case GIMPLE_OMP_ORDERED:
2018 case GIMPLE_OMP_SECTION:
2019 case GIMPLE_OMP_SECTIONS:
2020 case GIMPLE_OMP_SINGLE:
2021 {
2022 gimple_seq body_seq = gimple_omp_body (stmt);
2023 gimple_stmt_iterator body_gsi = gsi_start (body_seq);
2024
2025 remove_useless_stmts_1 (&body_gsi, data);
2026 data->last_was_goto = false;
2027 gsi_next (gsi);
2028 }
2029 break;
2030
2031 case GIMPLE_OMP_PARALLEL:
2032 case GIMPLE_OMP_TASK:
2033 {
2034 /* Make sure the outermost GIMPLE_BIND isn't removed
2035 as useless. */
2036 gimple_seq body_seq = gimple_omp_body (stmt);
2037 gimple bind = gimple_seq_first_stmt (body_seq);
2038 gimple_seq bind_seq = gimple_bind_body (bind);
2039 gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
2040
2041 remove_useless_stmts_1 (&bind_gsi, data);
2042 data->last_was_goto = false;
2043 gsi_next (gsi);
2044 }
2045 break;
2046
2047 case GIMPLE_CHANGE_DYNAMIC_TYPE:
2048 /* When not optimizing, remove GIMPLE_CHANGE_DYNAMIC_TYPE here, as
2049 expansion is confused by these statements; when optimizing, they
2050 are instead removed during alias computation.
2051 if (!optimize)
2052 {
2053 data->last_was_goto = false;
2054 gsi_remove (gsi, false);
2055 break;
2056 }
2057 /* Fallthru. */
2058
2059 default:
2060 data->last_was_goto = false;
2061 gsi_next (gsi);
2062 break;
2063 }
2064 }
2065 }
2066
2067 /* Walk the function tree, removing useless statements and performing
2068 some preliminary simplifications. */
2069
2070 static unsigned int
2071 remove_useless_stmts (void)
2072 {
2073 struct rus_data data;
2074
2075 clear_special_calls ();
2076
2077 do
2078 {
2079 gimple_stmt_iterator gsi;
2080
2081 gsi = gsi_start (gimple_body (current_function_decl));
2082 memset (&data, 0, sizeof (data));
2083 remove_useless_stmts_1 (&gsi, &data);
2084 }
2085 while (data.repeat);
2086 return 0;
2087 }
2088
2089
2090 struct gimple_opt_pass pass_remove_useless_stmts =
2091 {
2092 {
2093 GIMPLE_PASS,
2094 "useless", /* name */
2095 NULL, /* gate */
2096 remove_useless_stmts, /* execute */
2097 NULL, /* sub */
2098 NULL, /* next */
2099 0, /* static_pass_number */
2100 0, /* tv_id */
2101 PROP_gimple_any, /* properties_required */
2102 0, /* properties_provided */
2103 0, /* properties_destroyed */
2104 0, /* todo_flags_start */
2105 TODO_dump_func /* todo_flags_finish */
2106 }
2107 };
2108
2109 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2110
2111 static void
2112 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2113 {
2114 /* Since this block is no longer reachable, we can just delete all
2115 of its PHI nodes. */
2116 remove_phi_nodes (bb);
2117
2118 /* Remove edges to BB's successors. */
2119 while (EDGE_COUNT (bb->succs) > 0)
2120 remove_edge (EDGE_SUCC (bb, 0));
2121 }
2122
2123
2124 /* Remove statements of basic block BB. */
2125
2126 static void
2127 remove_bb (basic_block bb)
2128 {
2129 gimple_stmt_iterator i;
2130 source_location loc = UNKNOWN_LOCATION;
2131
2132 if (dump_file)
2133 {
2134 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2135 if (dump_flags & TDF_DETAILS)
2136 {
2137 dump_bb (bb, dump_file, 0);
2138 fprintf (dump_file, "\n");
2139 }
2140 }
2141
2142 if (current_loops)
2143 {
2144 struct loop *loop = bb->loop_father;
2145
2146 /* If a loop gets removed, clean up the information associated
2147 with it. */
2148 if (loop->latch == bb
2149 || loop->header == bb)
2150 free_numbers_of_iterations_estimates_loop (loop);
2151 }
2152
2153 /* Remove all the instructions in the block. */
2154 if (bb_seq (bb) != NULL)
2155 {
2156 for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2157 {
2158 gimple stmt = gsi_stmt (i);
2159 if (gimple_code (stmt) == GIMPLE_LABEL
2160 && (FORCED_LABEL (gimple_label_label (stmt))
2161 || DECL_NONLOCAL (gimple_label_label (stmt))))
2162 {
2163 basic_block new_bb;
2164 gimple_stmt_iterator new_gsi;
2165
2166 /* A non-reachable non-local label may still be referenced.
2167 But it no longer needs to carry the extra semantics of
2168 non-locality. */
2169 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2170 {
2171 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2172 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2173 }
2174
2175 new_bb = bb->prev_bb;
2176 new_gsi = gsi_start_bb (new_bb);
2177 gsi_remove (&i, false);
2178 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2179 }
2180 else
2181 {
2182 /* Release SSA definitions if we are in SSA. Note that we
2183 may be called when not in SSA. For example,
2184 final_cleanup calls this function via
2185 cleanup_tree_cfg. */
2186 if (gimple_in_ssa_p (cfun))
2187 release_defs (stmt);
2188
2189 gsi_remove (&i, true);
2190 }
2191
2192 /* Don't warn for removed gotos. Gotos are often removed due to
2193 jump threading, thus resulting in bogus warnings. Not great,
2194 since this way we lose warnings for gotos in the original
2195 program that are indeed unreachable. */
2196 if (gimple_code (stmt) != GIMPLE_GOTO
2197 && gimple_has_location (stmt)
2198 && !loc)
2199 loc = gimple_location (stmt);
2200 }
2201 }
2202
2203 /* If requested, give a warning that the first statement in the
2204 block is unreachable. The loop above walks the block forwards
2205 and records only the first location it sees, so LOC belongs to
2206 the earliest locatable statement in the block. */
2207 if (loc > BUILTINS_LOCATION && LOCATION_LINE (loc) > 0)
2208 warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
2209
2210 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2211 bb->il.gimple = NULL;
2212 }
2213
2214
2215 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2216 predicate VAL, return the edge that will be taken out of the block.
2217 If VAL does not match a unique edge, NULL is returned. */
2218
2219 edge
2220 find_taken_edge (basic_block bb, tree val)
2221 {
2222 gimple stmt;
2223
2224 stmt = last_stmt (bb);
2225
2226 gcc_assert (stmt);
2227 gcc_assert (is_ctrl_stmt (stmt));
2228
2229 if (val == NULL)
2230 return NULL;
2231
2232 if (!is_gimple_min_invariant (val))
2233 return NULL;
2234
2235 if (gimple_code (stmt) == GIMPLE_COND)
2236 return find_taken_edge_cond_expr (bb, val);
2237
2238 if (gimple_code (stmt) == GIMPLE_SWITCH)
2239 return find_taken_edge_switch_expr (bb, val);
2240
2241 if (computed_goto_p (stmt))
2242 {
2243 /* Only optimize if the argument is a label; if the argument is
2244 not a label then we cannot construct a proper CFG.
2245
2246 It may be the case that we only need to allow the LABEL_REF to
2247 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2248 appear inside a LABEL_EXPR just to be safe. */
2249 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2250 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2251 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2252 return NULL;
2253 }
2254
2255 gcc_unreachable ();
2256 }
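
/* A hedged usage sketch (the caller context is hypothetical): a pass
   that has proven the controlling value of BB constant might do

       tree val = build_int_cst (integer_type_node, 0);
       edge taken = find_taken_edge (bb, val);

   For a GIMPLE_COND a zero VAL selects the false edge and any other
   integer constant the true edge; for a GIMPLE_SWITCH the edge of the
   matching case label is returned.  A NULL result means no unique
   outgoing edge could be determined.  */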
2257
2258 /* Given a constant value VAL and the basic block BB ending in a
2259 computed GOTO statement, determine which of the outgoing edges will
2260 be taken out of the block. Return NULL if any edge may be taken. */
2261
2262 static edge
2263 find_taken_edge_computed_goto (basic_block bb, tree val)
2264 {
2265 basic_block dest;
2266 edge e = NULL;
2267
2268 dest = label_to_block (val);
2269 if (dest)
2270 {
2271 e = find_edge (bb, dest);
2272 gcc_assert (e != NULL);
2273 }
2274
2275 return e;
2276 }
2277
2278 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2279 statement, determine which of the two edges will be taken out of the
2280 block. Return NULL if either edge may be taken. */
2281
2282 static edge
2283 find_taken_edge_cond_expr (basic_block bb, tree val)
2284 {
2285 edge true_edge, false_edge;
2286
2287 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2288
2289 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2290 return (integer_zerop (val) ? false_edge : true_edge);
2291 }
2292
2293 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2294 statement, determine which edge will be taken out of the block. Return
2295 NULL if any edge may be taken. */
2296
2297 static edge
2298 find_taken_edge_switch_expr (basic_block bb, tree val)
2299 {
2300 basic_block dest_bb;
2301 edge e;
2302 gimple switch_stmt;
2303 tree taken_case;
2304
2305 switch_stmt = last_stmt (bb);
2306 taken_case = find_case_label_for_value (switch_stmt, val);
2307 dest_bb = label_to_block (CASE_LABEL (taken_case));
2308
2309 e = find_edge (bb, dest_bb);
2310 gcc_assert (e);
2311 return e;
2312 }
2313
2314
2315 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2316 We can make optimal use here of the fact that the case labels are
2317 sorted: We can do a binary search for a case matching VAL. */
2318
2319 static tree
2320 find_case_label_for_value (gimple switch_stmt, tree val)
2321 {
2322 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2323 tree default_case = gimple_switch_default_label (switch_stmt);
2324
2325 for (low = 0, high = n; high - low > 1; )
2326 {
2327 size_t i = (high + low) / 2;
2328 tree t = gimple_switch_label (switch_stmt, i);
2329 int cmp;
2330
2331 /* Cache the result of comparing CASE_LOW and VAL. */
2332 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2333
2334 if (cmp > 0)
2335 high = i;
2336 else
2337 low = i;
2338
2339 if (CASE_HIGH (t) == NULL)
2340 {
2341 /* A single-valued case label. */
2342 if (cmp == 0)
2343 return t;
2344 }
2345 else
2346 {
2347 /* A case range. We can only handle integer ranges. */
2348 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2349 return t;
2350 }
2351 }
2352
2353 return default_case;
2354 }
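
/* Worked example (the case values are made up): given the sorted case
   vector

       case 1:   case 4 ... 7:   case 9:   default:

   and VAL == 5, each probe compares CASE_LOW of the middle label with
   VAL and halves the window, ending on "case 4 ... 7"; since CASE_LOW
   <= VAL <= CASE_HIGH holds there, that label is returned.  For
   VAL == 8 no label matches and DEFAULT_CASE is returned instead.  */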
2355
2356
2357 /* Dump a basic block on stderr. */
2358
2359 void
2360 gimple_debug_bb (basic_block bb)
2361 {
2362 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2363 }
2364
2365
2366 /* Dump basic block with index N on stderr. */
2367
2368 basic_block
2369 gimple_debug_bb_n (int n)
2370 {
2371 gimple_debug_bb (BASIC_BLOCK (n));
2372 return BASIC_BLOCK (n);
2373 }
2374
2375
2376 /* Dump the CFG on stderr.
2377
2378 FLAGS are the same as those used by the tree dumping functions
2379 (see TDF_* in tree-pass.h). */
2380
2381 void
2382 gimple_debug_cfg (int flags)
2383 {
2384 gimple_dump_cfg (stderr, flags);
2385 }
2386
2387
2388 /* Dump the program showing basic block boundaries on the given FILE.
2389
2390 FLAGS are the same as those used by the tree dumping functions
2391 (see TDF_* in tree.h). */
2392
2393 void
2394 gimple_dump_cfg (FILE *file, int flags)
2395 {
2396 if (flags & TDF_DETAILS)
2397 {
2398 const char *funcname
2399 = lang_hooks.decl_printable_name (current_function_decl, 2);
2400
2401 fputc ('\n', file);
2402 fprintf (file, ";; Function %s\n\n", funcname);
2403 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2404 n_basic_blocks, n_edges, last_basic_block);
2405
2406 brief_dump_cfg (file);
2407 fprintf (file, "\n");
2408 }
2409
2410 if (flags & TDF_STATS)
2411 dump_cfg_stats (file);
2412
2413 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2414 }
2415
2416
2417 /* Dump CFG statistics on FILE. */
2418
2419 void
2420 dump_cfg_stats (FILE *file)
2421 {
2422 static long max_num_merged_labels = 0;
2423 unsigned long size, total = 0;
2424 long num_edges;
2425 basic_block bb;
2426 const char * const fmt_str = "%-30s%-13s%12s\n";
2427 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2428 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2429 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2430 const char *funcname
2431 = lang_hooks.decl_printable_name (current_function_decl, 2);
2432
2433
2434 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2435
2436 fprintf (file, "---------------------------------------------------------\n");
2437 fprintf (file, fmt_str, "", " Number of ", "Memory");
2438 fprintf (file, fmt_str, "", " instances ", "used ");
2439 fprintf (file, "---------------------------------------------------------\n");
2440
2441 size = n_basic_blocks * sizeof (struct basic_block_def);
2442 total += size;
2443 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2444 SCALE (size), LABEL (size));
2445
2446 num_edges = 0;
2447 FOR_EACH_BB (bb)
2448 num_edges += EDGE_COUNT (bb->succs);
2449 size = num_edges * sizeof (struct edge_def);
2450 total += size;
2451 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2452
2453 fprintf (file, "---------------------------------------------------------\n");
2454 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2455 LABEL (total));
2456 fprintf (file, "---------------------------------------------------------\n");
2457 fprintf (file, "\n");
2458
2459 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2460 max_num_merged_labels = cfg_stats.num_merged_labels;
2461
2462 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2463 cfg_stats.num_merged_labels, max_num_merged_labels);
2464
2465 fprintf (file, "\n");
2466 }
2467
2468
2469 /* Dump CFG statistics on stderr. Keep extern so that it's always
2470 linked in the final executable. */
2471
2472 void
2473 debug_cfg_stats (void)
2474 {
2475 dump_cfg_stats (stderr);
2476 }
2477
2478
2479 /* Dump the flowgraph to a .vcg FILE. */
2480
2481 static void
2482 gimple_cfg2vcg (FILE *file)
2483 {
2484 edge e;
2485 edge_iterator ei;
2486 basic_block bb;
2487 const char *funcname
2488 = lang_hooks.decl_printable_name (current_function_decl, 2);
2489
2490 /* Write the file header. */
2491 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2492 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2493 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2494
2495 /* Write blocks and edges. */
2496 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2497 {
2498 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2499 e->dest->index);
2500
2501 if (e->flags & EDGE_FAKE)
2502 fprintf (file, " linestyle: dotted priority: 10");
2503 else
2504 fprintf (file, " linestyle: solid priority: 100");
2505
2506 fprintf (file, " }\n");
2507 }
2508 fputc ('\n', file);
2509
2510 FOR_EACH_BB (bb)
2511 {
2512 enum gimple_code head_code, end_code;
2513 const char *head_name, *end_name;
2514 int head_line = 0;
2515 int end_line = 0;
2516 gimple first = first_stmt (bb);
2517 gimple last = last_stmt (bb);
2518
2519 if (first)
2520 {
2521 head_code = gimple_code (first);
2522 head_name = gimple_code_name[head_code];
2523 head_line = get_lineno (first);
2524 }
2525 else
2526 head_name = "no-statement";
2527
2528 if (last)
2529 {
2530 end_code = gimple_code (last);
2531 end_name = gimple_code_name[end_code];
2532 end_line = get_lineno (last);
2533 }
2534 else
2535 end_name = "no-statement";
2536
2537 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2538 bb->index, bb->index, head_name, head_line, end_name,
2539 end_line);
2540
2541 FOR_EACH_EDGE (e, ei, bb->succs)
2542 {
2543 if (e->dest == EXIT_BLOCK_PTR)
2544 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2545 else
2546 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2547
2548 if (e->flags & EDGE_FAKE)
2549 fprintf (file, " priority: 10 linestyle: dotted");
2550 else
2551 fprintf (file, " priority: 100 linestyle: solid");
2552
2553 fprintf (file, " }\n");
2554 }
2555
2556 if (bb->next_bb != EXIT_BLOCK_PTR)
2557 fputc ('\n', file);
2558 }
2559
2560 fputs ("}\n\n", file);
2561 }
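
/* Sample of the VCG text emitted above (the function name and block
   numbers are illustrative):

       graph: { title: "foo"
       node: { title: "ENTRY" label: "ENTRY" }
       node: { title: "EXIT" label: "EXIT" }
       edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }
       node: { title: "2" label: "#2\nGIMPLE_LABEL (10)\nGIMPLE_COND (12)"}
       edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
       }  */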
2562
2563
2564
2565 /*---------------------------------------------------------------------------
2566 Miscellaneous helpers
2567 ---------------------------------------------------------------------------*/
2568
2569 /* Return true if T represents a stmt that always transfers control. */
2570
2571 bool
2572 is_ctrl_stmt (gimple t)
2573 {
2574 return gimple_code (t) == GIMPLE_COND
2575 || gimple_code (t) == GIMPLE_SWITCH
2576 || gimple_code (t) == GIMPLE_GOTO
2577 || gimple_code (t) == GIMPLE_RETURN
2578 || gimple_code (t) == GIMPLE_RESX;
2579 }
2580
2581
2582 /* Return true if T is a statement that may alter the flow of control
2583 (e.g., a call to a non-returning function). */
2584
2585 bool
2586 is_ctrl_altering_stmt (gimple t)
2587 {
2588 gcc_assert (t);
2589
2590 if (is_gimple_call (t))
2591 {
2592 int flags = gimple_call_flags (t);
2593
2594 /* A non-pure/const call alters control flow if the current
2595 function has nonlocal labels. */
2596 if (!(flags & (ECF_CONST | ECF_PURE))
2597 && cfun->has_nonlocal_label)
2598 return true;
2599
2600 /* A call also alters control flow if it does not return. */
2601 if (flags & ECF_NORETURN)
2602 return true;
2603 }
2604
2605 /* OpenMP directives alter control flow. */
2606 if (is_gimple_omp (t))
2607 return true;
2608
2609 /* If a statement can throw, it alters control flow. */
2610 return stmt_can_throw_internal (t);
2611 }
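
/* For example, a GIMPLE_CALL to a noreturn function, a non-pure/const
   call in a function with nonlocal labels, any OpenMP directive and any
   statement that can throw internally all alter control flow, and so
   must end their basic block (see stmt_ends_bb_p below).  */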
2612
2613
2614 /* Return true if T is a simple local goto. */
2615
2616 bool
2617 simple_goto_p (gimple t)
2618 {
2619 return (gimple_code (t) == GIMPLE_GOTO
2620 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2621 }
2622
2623
2624 /* Return true if T can make an abnormal transfer of control flow.
2625 Transfers of control flow associated with EH are excluded. */
2626
2627 bool
2628 stmt_can_make_abnormal_goto (gimple t)
2629 {
2630 if (computed_goto_p (t))
2631 return true;
2632 if (is_gimple_call (t))
2633 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2634 return false;
2635 }
2636
2637
2638 /* Return true if STMT should start a new basic block. PREV_STMT is
2639 the statement preceding STMT. It is used when STMT is a label or a
2640 case label. Labels should only start a new basic block if their
2641 previous statement wasn't a label. Otherwise, sequences of labels
2642 would generate unnecessary basic blocks that only contain a single
2643 label. */
2644
2645 static inline bool
2646 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2647 {
2648 if (stmt == NULL)
2649 return false;
2650
2651 /* Labels start a new basic block only if the preceding statement
2652 wasn't a label of the same type. This prevents the creation of
2653 consecutive blocks that have nothing but a single label. */
2654 if (gimple_code (stmt) == GIMPLE_LABEL)
2655 {
2656 /* Nonlocal and computed GOTO targets always start a new block. */
2657 if (DECL_NONLOCAL (gimple_label_label (stmt))
2658 || FORCED_LABEL (gimple_label_label (stmt)))
2659 return true;
2660
2661 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2662 {
2663 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2664 return true;
2665
2666 cfg_stats.num_merged_labels++;
2667 return false;
2668 }
2669 else
2670 return true;
2671 }
2672
2673 return false;
2674 }
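
/* Illustrative GIMPLE (made up): in

       L1:
       L2:
         x_1 = 1;

   L1 starts a new basic block but L2 does not; L2 is merged into L1's
   block and counted in cfg_stats.num_merged_labels.  A nonlocal label
   or a FORCED_LABEL in L2's position would still start its own block.  */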
2675
2676
2677 /* Return true if T should end a basic block. */
2678
2679 bool
2680 stmt_ends_bb_p (gimple t)
2681 {
2682 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2683 }
2684
2685 /* Remove block annotations and other data structures. */
2686
2687 void
2688 delete_tree_cfg_annotations (void)
2689 {
2690 label_to_block_map = NULL;
2691 }
2692
2693
2694 /* Return the first statement in basic block BB. */
2695
2696 gimple
2697 first_stmt (basic_block bb)
2698 {
2699 gimple_stmt_iterator i = gsi_start_bb (bb);
2700 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2701 }
2702
2703 /* Return the last statement in basic block BB. */
2704
2705 gimple
2706 last_stmt (basic_block bb)
2707 {
2708 gimple_stmt_iterator b = gsi_last_bb (bb);
2709 return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
2710 }
2711
2712 /* Return the last statement of an otherwise empty block. Return NULL
2713 if the block is totally empty, or if it contains more than one
2714 statement. */
2715
2716 gimple
2717 last_and_only_stmt (basic_block bb)
2718 {
2719 gimple_stmt_iterator i = gsi_last_bb (bb);
2720 gimple last, prev;
2721
2722 if (gsi_end_p (i))
2723 return NULL;
2724
2725 last = gsi_stmt (i);
2726 gsi_prev (&i);
2727 if (gsi_end_p (i))
2728 return last;
2729
2730 /* Empty statements should no longer appear in the instruction stream.
2731 Everything that might have appeared before should be deleted by
2732 remove_useless_stmts, and the optimizers should just gsi_remove
2733 instead of smashing with build_empty_stmt.
2734
2735 Thus the only thing that should appear here in a block containing
2736 one executable statement is a label. */
2737 prev = gsi_stmt (i);
2738 if (gimple_code (prev) == GIMPLE_LABEL)
2739 return last;
2740 else
2741 return NULL;
2742 }
2743
2744 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2745
2746 static void
2747 reinstall_phi_args (edge new_edge, edge old_edge)
2748 {
2749 edge_var_map_vector v;
2750 edge_var_map *vm;
2751 int i;
2752 gimple_stmt_iterator phis;
2753
2754 v = redirect_edge_var_map_vector (old_edge);
2755 if (!v)
2756 return;
2757
2758 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2759 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2760 i++, gsi_next (&phis))
2761 {
2762 gimple phi = gsi_stmt (phis);
2763 tree result = redirect_edge_var_map_result (vm);
2764 tree arg = redirect_edge_var_map_def (vm);
2765
2766 gcc_assert (result == gimple_phi_result (phi));
2767
2768 add_phi_arg (phi, arg, new_edge);
2769 }
2770
2771 redirect_edge_var_map_clear (old_edge);
2772 }
2773
2774 /* Returns the basic block after which the new basic block created
2775 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2776 near its "logical" location. This is of most help to humans looking
2777 at debugging dumps. */
2778
2779 static basic_block
2780 split_edge_bb_loc (edge edge_in)
2781 {
2782 basic_block dest = edge_in->dest;
2783
2784 if (dest->prev_bb && find_edge (dest->prev_bb, dest))
2785 return edge_in->src;
2786 else
2787 return dest->prev_bb;
2788 }
2789
2790 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2791 Abort on abnormal edges. */
2792
2793 static basic_block
2794 gimple_split_edge (edge edge_in)
2795 {
2796 basic_block new_bb, after_bb, dest;
2797 edge new_edge, e;
2798
2799 /* Abnormal edges cannot be split. */
2800 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2801
2802 dest = edge_in->dest;
2803
2804 after_bb = split_edge_bb_loc (edge_in);
2805
2806 new_bb = create_empty_bb (after_bb);
2807 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2808 new_bb->count = edge_in->count;
2809 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2810 new_edge->probability = REG_BR_PROB_BASE;
2811 new_edge->count = edge_in->count;
2812
2813 e = redirect_edge_and_branch (edge_in, new_bb);
2814 gcc_assert (e == edge_in);
2815 reinstall_phi_args (new_edge, e);
2816
2817 return new_bb;
2818 }
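
/* Sketch of the effect (the block names are illustrative):

       A ---> B      becomes      A ---> N ---> B

   where N is a fresh empty block that falls through to B and inherits
   EDGE_IN's count and frequency, and the PHI arguments queued on the
   old A->B edge are reinstalled on the new N->B edge by
   reinstall_phi_args.  */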
2819
2820 /* Callback for walk_tree; check that all elements with address taken
2821 are properly noticed as such. DATA is unused here and is kept only
2822 to match the walk_tree callback signature. */
2823
2824 static tree
2825 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2826 {
2827 tree t = *tp, x;
2828
2829 if (TYPE_P (t))
2830 *walk_subtrees = 0;
2831
2832 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2833 #define CHECK_OP(N, MSG) \
2834 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2835 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2836
2837 switch (TREE_CODE (t))
2838 {
2839 case SSA_NAME:
2840 if (SSA_NAME_IN_FREE_LIST (t))
2841 {
2842 error ("SSA name in freelist but still referenced");
2843 return *tp;
2844 }
2845 break;
2846
2847 case INDIRECT_REF:
2848 x = TREE_OPERAND (t, 0);
2849 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2850 {
2851 error ("Indirect reference's operand is not a register or a constant.");
2852 return x;
2853 }
2854 break;
2855
2856 case ASSERT_EXPR:
2857 x = fold (ASSERT_EXPR_COND (t));
2858 if (x == boolean_false_node)
2859 {
2860 error ("ASSERT_EXPR with an always-false condition");
2861 return *tp;
2862 }
2863 break;
2864
2865 case MODIFY_EXPR:
2866 error ("MODIFY_EXPR not expected while having tuples.");
2867 return *tp;
2868
2869 case ADDR_EXPR:
2870 {
2871 bool old_constant;
2872 bool old_side_effects;
2873 bool new_constant;
2874 bool new_side_effects;
2875
2876 gcc_assert (is_gimple_address (t));
2877
2878 old_constant = TREE_CONSTANT (t);
2879 old_side_effects = TREE_SIDE_EFFECTS (t);
2880
2881 recompute_tree_invariant_for_addr_expr (t);
2882 new_side_effects = TREE_SIDE_EFFECTS (t);
2883 new_constant = TREE_CONSTANT (t);
2884
2885 if (old_constant != new_constant)
2886 {
2887 error ("constant not recomputed when ADDR_EXPR changed");
2888 return t;
2889 }
2890 if (old_side_effects != new_side_effects)
2891 {
2892 error ("side effects not recomputed when ADDR_EXPR changed");
2893 return t;
2894 }
2895
2896 /* Skip any references (they will be checked when we recurse down the
2897 tree) and ensure that any variable used as a prefix is marked
2898 addressable. */
2899 for (x = TREE_OPERAND (t, 0);
2900 handled_component_p (x);
2901 x = TREE_OPERAND (x, 0))
2902 ;
2903
2904 if (!(TREE_CODE (x) == VAR_DECL
2905 || TREE_CODE (x) == PARM_DECL
2906 || TREE_CODE (x) == RESULT_DECL))
2907 return NULL;
2908 if (!TREE_ADDRESSABLE (x))
2909 {
2910 error ("address taken, but ADDRESSABLE bit not set");
2911 return x;
2912 }
2913 if (DECL_GIMPLE_REG_P (x))
2914 {
2915 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2916 return x;
2917 }
2918
2919 break;
2920 }
2921
2922 case COND_EXPR:
2923 x = COND_EXPR_COND (t);
2924 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2925 {
2926 error ("non-integral used in condition");
2927 return x;
2928 }
2929 if (!is_gimple_condexpr (x))
2930 {
2931 error ("invalid conditional operand");
2932 return x;
2933 }
2934 break;
2935
2936 case NON_LVALUE_EXPR:
2937 gcc_unreachable ();
2938
2939 CASE_CONVERT:
2940 case FIX_TRUNC_EXPR:
2941 case FLOAT_EXPR:
2942 case NEGATE_EXPR:
2943 case ABS_EXPR:
2944 case BIT_NOT_EXPR:
2945 case TRUTH_NOT_EXPR:
2946 CHECK_OP (0, "invalid operand to unary operator");
2947 break;
2948
2949 case REALPART_EXPR:
2950 case IMAGPART_EXPR:
2951 case COMPONENT_REF:
2952 case ARRAY_REF:
2953 case ARRAY_RANGE_REF:
2954 case BIT_FIELD_REF:
2955 case VIEW_CONVERT_EXPR:
2956 /* We have a nest of references. Verify that each of the operands
2957 that determine where to reference is either a constant or a variable,
2958 verify that the base is valid, and then show we've already checked
2959 the subtrees. */
2960 while (handled_component_p (t))
2961 {
2962 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2963 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2964 else if (TREE_CODE (t) == ARRAY_REF
2965 || TREE_CODE (t) == ARRAY_RANGE_REF)
2966 {
2967 CHECK_OP (1, "invalid array index");
2968 if (TREE_OPERAND (t, 2))
2969 CHECK_OP (2, "invalid array lower bound");
2970 if (TREE_OPERAND (t, 3))
2971 CHECK_OP (3, "invalid array stride");
2972 }
2973 else if (TREE_CODE (t) == BIT_FIELD_REF)
2974 {
2975 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2976 || !host_integerp (TREE_OPERAND (t, 2), 1))
2977 {
2978 error ("invalid position or size operand to BIT_FIELD_REF");
2979 return t;
2980 }
2981 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2982 && (TYPE_PRECISION (TREE_TYPE (t))
2983 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2984 {
2985 error ("integral result type precision does not match "
2986 "field size of BIT_FIELD_REF");
2987 return t;
2988 }
2989 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2990 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2991 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2992 {
2993 error ("mode precision of non-integral result does not "
2994 "match field size of BIT_FIELD_REF");
2995 return t;
2996 }
2997 }
2998
2999 t = TREE_OPERAND (t, 0);
3000 }
3001
3002 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3003 {
3004 error ("invalid reference prefix");
3005 return t;
3006 }
3007 *walk_subtrees = 0;
3008 break;
3009 case PLUS_EXPR:
3010 case MINUS_EXPR:
3011 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
3012 should be done using POINTER_PLUS_EXPR. */
3013 if (POINTER_TYPE_P (TREE_TYPE (t)))
3014 {
3015 error ("invalid operand to plus/minus, type is a pointer");
3016 return t;
3017 }
3018 CHECK_OP (0, "invalid operand to binary operator");
3019 CHECK_OP (1, "invalid operand to binary operator");
3020 break;
3021
3022 case POINTER_PLUS_EXPR:
3023 /* Check to make sure the first operand is a pointer or reference type. */
3024 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3025 {
3026 error ("invalid operand to pointer plus, first operand is not a pointer");
3027 return t;
3028 }
3029 /* Check to make sure the second operand is an integer with type of
3030 sizetype. */
3031 if (!useless_type_conversion_p (sizetype,
3032 TREE_TYPE (TREE_OPERAND (t, 1))))
3033 {
3034 error ("invalid operand to pointer plus, second operand is not an "
3035 "integer with type of sizetype.");
3036 return t;
3037 }
3038 /* FALLTHROUGH */
3039 case LT_EXPR:
3040 case LE_EXPR:
3041 case GT_EXPR:
3042 case GE_EXPR:
3043 case EQ_EXPR:
3044 case NE_EXPR:
3045 case UNORDERED_EXPR:
3046 case ORDERED_EXPR:
3047 case UNLT_EXPR:
3048 case UNLE_EXPR:
3049 case UNGT_EXPR:
3050 case UNGE_EXPR:
3051 case UNEQ_EXPR:
3052 case LTGT_EXPR:
3053 case MULT_EXPR:
3054 case TRUNC_DIV_EXPR:
3055 case CEIL_DIV_EXPR:
3056 case FLOOR_DIV_EXPR:
3057 case ROUND_DIV_EXPR:
3058 case TRUNC_MOD_EXPR:
3059 case CEIL_MOD_EXPR:
3060 case FLOOR_MOD_EXPR:
3061 case ROUND_MOD_EXPR:
3062 case RDIV_EXPR:
3063 case EXACT_DIV_EXPR:
3064 case MIN_EXPR:
3065 case MAX_EXPR:
3066 case LSHIFT_EXPR:
3067 case RSHIFT_EXPR:
3068 case LROTATE_EXPR:
3069 case RROTATE_EXPR:
3070 case BIT_IOR_EXPR:
3071 case BIT_XOR_EXPR:
3072 case BIT_AND_EXPR:
3073 CHECK_OP (0, "invalid operand to binary operator");
3074 CHECK_OP (1, "invalid operand to binary operator");
3075 break;
3076
3077 case CONSTRUCTOR:
3078 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3079 *walk_subtrees = 0;
3080 break;
3081
3082 default:
3083 break;
3084 }
3085 return NULL;
3086
3087 #undef CHECK_OP
3088 }
3089
3090
3091 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3092 Returns true if there is an error, otherwise false. */
3093
3094 static bool
3095 verify_types_in_gimple_min_lval (tree expr)
3096 {
3097 tree op;
3098
3099 if (is_gimple_id (expr))
3100 return false;
3101
3102 if (!INDIRECT_REF_P (expr)
3103 && TREE_CODE (expr) != TARGET_MEM_REF)
3104 {
3105 error ("invalid expression for min lvalue");
3106 return true;
3107 }
3108
3109 /* TARGET_MEM_REFs are strange beasts. */
3110 if (TREE_CODE (expr) == TARGET_MEM_REF)
3111 return false;
3112
3113 op = TREE_OPERAND (expr, 0);
3114 if (!is_gimple_val (op))
3115 {
3116 error ("invalid operand in indirect reference");
3117 debug_generic_stmt (op);
3118 return true;
3119 }
3120 if (!useless_type_conversion_p (TREE_TYPE (expr),
3121 TREE_TYPE (TREE_TYPE (op))))
3122 {
3123 error ("type mismatch in indirect reference");
3124 debug_generic_stmt (TREE_TYPE (expr));
3125 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3126 return true;
3127 }
3128
3129 return false;
3130 }
3131
3132 /* Verify if EXPR is a valid GIMPLE reference expression. Returns true
3133 if there is an error, otherwise false. */
3134
3135 static bool
3136 verify_types_in_gimple_reference (tree expr)
3137 {
3138 while (handled_component_p (expr))
3139 {
3140 tree op = TREE_OPERAND (expr, 0);
3141
3142 if (TREE_CODE (expr) == ARRAY_REF
3143 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3144 {
3145 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3146 || (TREE_OPERAND (expr, 2)
3147 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3148 || (TREE_OPERAND (expr, 3)
3149 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3150 {
3151 error ("invalid operands to array reference");
3152 debug_generic_stmt (expr);
3153 return true;
3154 }
3155 }
3156
3157 /* Verify if the reference array element types are compatible. */
3158 if (TREE_CODE (expr) == ARRAY_REF
3159 && !useless_type_conversion_p (TREE_TYPE (expr),
3160 TREE_TYPE (TREE_TYPE (op))))
3161 {
3162 error ("type mismatch in array reference");
3163 debug_generic_stmt (TREE_TYPE (expr));
3164 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165 return true;
3166 }
3167 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3168 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3169 TREE_TYPE (TREE_TYPE (op))))
3170 {
3171 error ("type mismatch in array range reference");
3172 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3173 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3174 return true;
3175 }
3176
3177 if ((TREE_CODE (expr) == REALPART_EXPR
3178 || TREE_CODE (expr) == IMAGPART_EXPR)
3179 && !useless_type_conversion_p (TREE_TYPE (expr),
3180 TREE_TYPE (TREE_TYPE (op))))
3181 {
3182 error ("type mismatch in real/imagpart reference");
3183 debug_generic_stmt (TREE_TYPE (expr));
3184 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3185 return true;
3186 }
3187
3188 if (TREE_CODE (expr) == COMPONENT_REF
3189 && !useless_type_conversion_p (TREE_TYPE (expr),
3190 TREE_TYPE (TREE_OPERAND (expr, 1))))
3191 {
3192 error ("type mismatch in component reference");
3193 debug_generic_stmt (TREE_TYPE (expr));
3194 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3195 return true;
3196 }
3197
3198 /* For VIEW_CONVERT_EXPRs which are allowed here, too, there
3199 is nothing to verify. Gross mismatches at most invoke
3200 undefined behavior. */
3201 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3202 && !handled_component_p (op))
3203 return false;
3204
3205 expr = op;
3206 }
3207
3208 return verify_types_in_gimple_min_lval (expr);
3209 }
3210
3211 /* Returns true if some pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3212 list of pointer-to types is trivially convertible to DEST. */
3213
3214 static bool
3215 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3216 {
3217 tree src;
3218
3219 if (!TYPE_POINTER_TO (src_obj))
3220 return true;
3221
3222 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3223 if (useless_type_conversion_p (dest, src))
3224 return true;
3225
3226 return false;
3227 }
3228
3229 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3230 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3231
3232 static bool
3233 valid_fixed_convert_types_p (tree type1, tree type2)
3234 {
3235 return (FIXED_POINT_TYPE_P (type1)
3236 && (INTEGRAL_TYPE_P (type2)
3237 || SCALAR_FLOAT_TYPE_P (type2)
3238 || FIXED_POINT_TYPE_P (type2)));
3239 }
3240
3241 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3242 is a problem, otherwise false. */
3243
3244 static bool
3245 verify_gimple_call (gimple stmt)
3246 {
3247 tree fn = gimple_call_fn (stmt);
3248 tree fntype;
3249
3250 if (!POINTER_TYPE_P (TREE_TYPE (fn))
3251 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3252 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
3253 {
3254 error ("non-function in gimple call");
3255 return true;
3256 }
3257
3258 if (gimple_call_lhs (stmt)
3259 && !is_gimple_lvalue (gimple_call_lhs (stmt)))
3260 {
3261 error ("invalid LHS in gimple call");
3262 return true;
3263 }
3264
3265 fntype = TREE_TYPE (TREE_TYPE (fn));
3266 if (gimple_call_lhs (stmt)
3267 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3268 TREE_TYPE (fntype))
3269 /* ??? At least C++ misses conversions at assignments from
3270 void * call results.
3271 ??? Java is completely off. Especially with functions
3272 returning java.lang.Object.
3273 For now simply allow arbitrary pointer type conversions. */
3274 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3275 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3276 {
3277 error ("invalid conversion in gimple call");
3278 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3279 debug_generic_stmt (TREE_TYPE (fntype));
3280 return true;
3281 }
3282
3283 /* ??? The C frontend passes unpromoted arguments in case it
3284 didn't see a function declaration before the call. So for now
3285 leave the call arguments unverified. Once we gimplify
3286 unit-at-a-time we have a chance to fix this. */
3287
3288 return false;
3289 }
3290
3291 /* Verify a gimple comparison with result type TYPE and operands
3292 OP0 and OP1. Returns true if there is an error, otherwise false. */
3293
3294 static bool
3295 verify_gimple_comparison (tree type, tree op0, tree op1)
3296 {
3297 tree op0_type = TREE_TYPE (op0);
3298 tree op1_type = TREE_TYPE (op1);
3299
3300 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3301 {
3302 error ("invalid operands in gimple comparison");
3303 return true;
3304 }
3305
3306 /* For comparisons we do not have the operations type as the
3307 effective type the comparison is carried out in. Instead
3308 we require that either the first operand is trivially
3309 convertible into the second, or the other way around.
3310 The resulting type of a comparison may be any integral type.
3311 Because we special-case pointers to void we allow
3312 comparisons of pointers with the same mode as well. */
3313 if ((!useless_type_conversion_p (op0_type, op1_type)
3314 && !useless_type_conversion_p (op1_type, op0_type)
3315 && (!POINTER_TYPE_P (op0_type)
3316 || !POINTER_TYPE_P (op1_type)
3317 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3318 || !INTEGRAL_TYPE_P (type))
3319 {
3320 error ("type mismatch in comparison expression");
3321 debug_generic_expr (type);
3322 debug_generic_expr (op0_type);
3323 debug_generic_expr (op1_type);
3324 return true;
3325 }
3326
3327 return false;
3328 }
3329
3330 /* Verify a gimple assignment statement STMT with an unary rhs.
3331 Returns true if anything is wrong. */
3332
3333 static bool
3334 verify_gimple_assign_unary (gimple stmt)
3335 {
3336 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3337 tree lhs = gimple_assign_lhs (stmt);
3338 tree lhs_type = TREE_TYPE (lhs);
3339 tree rhs1 = gimple_assign_rhs1 (stmt);
3340 tree rhs1_type = TREE_TYPE (rhs1);
3341
3342 if (!is_gimple_reg (lhs)
3343 && !(optimize == 0
3344 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3345 {
3346 error ("non-register as LHS of unary operation");
3347 return true;
3348 }
3349
3350 if (!is_gimple_val (rhs1))
3351 {
3352 error ("invalid operand in unary operation");
3353 return true;
3354 }
3355
3356 /* First handle conversions. */
3357 switch (rhs_code)
3358 {
3359 CASE_CONVERT:
3360 {
3361 /* Allow conversions between integral types and pointers only if
3362 there is no sign or zero extension involved.
3363 For targets where the precision of sizetype doesn't match that
3364 of pointers we need to allow arbitrary conversions from and
3365 to sizetype. */
3366 if ((POINTER_TYPE_P (lhs_type)
3367 && INTEGRAL_TYPE_P (rhs1_type)
3368 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3369 || rhs1_type == sizetype))
3370 || (POINTER_TYPE_P (rhs1_type)
3371 && INTEGRAL_TYPE_P (lhs_type)
3372 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3373 || lhs_type == sizetype)))
3374 return false;
3375
3376 /* Allow conversion from integer to offset type and vice versa. */
3377 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3378 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3379 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3380 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3381 return false;
3382
3383 /* Otherwise assert we are converting between types of the
3384 same kind. */
3385 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3386 {
3387 error ("invalid types in nop conversion");
3388 debug_generic_expr (lhs_type);
3389 debug_generic_expr (rhs1_type);
3390 return true;
3391 }
3392
3393 return false;
3394 }
3395
3396 case FIXED_CONVERT_EXPR:
3397 {
3398 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3399 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3400 {
3401 error ("invalid types in fixed-point conversion");
3402 debug_generic_expr (lhs_type);
3403 debug_generic_expr (rhs1_type);
3404 return true;
3405 }
3406
3407 return false;
3408 }
3409
3410 case FLOAT_EXPR:
3411 {
3412 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3413 {
3414 error ("invalid types in conversion to floating point");
3415 debug_generic_expr (lhs_type);
3416 debug_generic_expr (rhs1_type);
3417 return true;
3418 }
3419
3420 return false;
3421 }
3422
3423 case FIX_TRUNC_EXPR:
3424 {
3425 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3426 {
3427 error ("invalid types in conversion to integer");
3428 debug_generic_expr (lhs_type);
3429 debug_generic_expr (rhs1_type);
3430 return true;
3431 }
3432
3433 return false;
3434 }
3435
3436 case VEC_UNPACK_HI_EXPR:
3437 case VEC_UNPACK_LO_EXPR:
3438 case REDUC_MAX_EXPR:
3439 case REDUC_MIN_EXPR:
3440 case REDUC_PLUS_EXPR:
3441 case VEC_UNPACK_FLOAT_HI_EXPR:
3442 case VEC_UNPACK_FLOAT_LO_EXPR:
3443 /* FIXME. */
3444 return false;
3445
3446 case TRUTH_NOT_EXPR:
3447 case NEGATE_EXPR:
3448 case ABS_EXPR:
3449 case BIT_NOT_EXPR:
3450 case PAREN_EXPR:
3451 case NON_LVALUE_EXPR:
3452 case CONJ_EXPR:
3453 break;
3454
3455 default:
3456 gcc_unreachable ();
3457 }
3458
3459 /* For the remaining codes assert there is no conversion involved. */
3460 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3461 {
3462 error ("non-trivial conversion in unary operation");
3463 debug_generic_expr (lhs_type);
3464 debug_generic_expr (rhs1_type);
3465 return true;
3466 }
3467
3468 return false;
3469 }
3470
3471 /* Verify a gimple assignment statement STMT with a binary rhs.
3472 Returns true if anything is wrong. */
3473
3474 static bool
3475 verify_gimple_assign_binary (gimple stmt)
3476 {
3477 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3478 tree lhs = gimple_assign_lhs (stmt);
3479 tree lhs_type = TREE_TYPE (lhs);
3480 tree rhs1 = gimple_assign_rhs1 (stmt);
3481 tree rhs1_type = TREE_TYPE (rhs1);
3482 tree rhs2 = gimple_assign_rhs2 (stmt);
3483 tree rhs2_type = TREE_TYPE (rhs2);
3484
3485 if (!is_gimple_reg (lhs)
3486 && !(optimize == 0
3487 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3488 {
3489 error ("non-register as LHS of binary operation");
3490 return true;
3491 }
3492
3493 if (!is_gimple_val (rhs1)
3494 || !is_gimple_val (rhs2))
3495 {
3496 error ("invalid operands in binary operation");
3497 return true;
3498 }
3499
3500 /* First handle operations that involve different types. */
3501 switch (rhs_code)
3502 {
3503 case COMPLEX_EXPR:
3504 {
3505 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3506 || !(INTEGRAL_TYPE_P (rhs1_type)
3507 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3508 || !(INTEGRAL_TYPE_P (rhs2_type)
3509 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3510 {
3511 error ("type mismatch in complex expression");
3512 debug_generic_expr (lhs_type);
3513 debug_generic_expr (rhs1_type);
3514 debug_generic_expr (rhs2_type);
3515 return true;
3516 }
3517
3518 return false;
3519 }
3520
3521 case LSHIFT_EXPR:
3522 case RSHIFT_EXPR:
3523 case LROTATE_EXPR:
3524 case RROTATE_EXPR:
3525 {
3526 /* Shifts and rotates are ok on integral types, fixed point
3527 types and integer vector types. */
3528 if ((!INTEGRAL_TYPE_P (rhs1_type)
3529 && !FIXED_POINT_TYPE_P (rhs1_type)
3530 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3531 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE))
3532 || (!INTEGRAL_TYPE_P (rhs2_type)
3533 /* Vector shifts of vectors are also ok. */
3534 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3535 && TREE_CODE (TREE_TYPE (rhs1_type)) == INTEGER_TYPE
3536 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3537 && TREE_CODE (TREE_TYPE (rhs2_type)) == INTEGER_TYPE))
3538 || !useless_type_conversion_p (lhs_type, rhs1_type))
3539 {
3540 error ("type mismatch in shift expression");
3541 debug_generic_expr (lhs_type);
3542 debug_generic_expr (rhs1_type);
3543 debug_generic_expr (rhs2_type);
3544 return true;
3545 }
3546
3547 return false;
3548 }
3549
3550 case VEC_LSHIFT_EXPR:
3551 case VEC_RSHIFT_EXPR:
3552 {
3553 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3554 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3555 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type)))
3556 || (!INTEGRAL_TYPE_P (rhs2_type)
3557 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3558 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3559 || !useless_type_conversion_p (lhs_type, rhs1_type))
3560 {
3561 error ("type mismatch in vector shift expression");
3562 debug_generic_expr (lhs_type);
3563 debug_generic_expr (rhs1_type);
3564 debug_generic_expr (rhs2_type);
3565 return true;
3566 }
3567
3568 return false;
3569 }
3570
3571 case POINTER_PLUS_EXPR:
3572 {
3573 if (!POINTER_TYPE_P (rhs1_type)
3574 || !useless_type_conversion_p (lhs_type, rhs1_type)
3575 || !useless_type_conversion_p (sizetype, rhs2_type))
3576 {
3577 error ("type mismatch in pointer plus expression");
3578 debug_generic_stmt (lhs_type);
3579 debug_generic_stmt (rhs1_type);
3580 debug_generic_stmt (rhs2_type);
3581 return true;
3582 }
3583
3584 return false;
3585 }
3586
3587 case TRUTH_ANDIF_EXPR:
3588 case TRUTH_ORIF_EXPR:
3589 gcc_unreachable ();
3590
3591 case TRUTH_AND_EXPR:
3592 case TRUTH_OR_EXPR:
3593 case TRUTH_XOR_EXPR:
3594 {
3595 /* We allow any kind of integral typed argument and result. */
3596 if (!INTEGRAL_TYPE_P (rhs1_type)
3597 || !INTEGRAL_TYPE_P (rhs2_type)
3598 || !INTEGRAL_TYPE_P (lhs_type))
3599 {
3600 error ("type mismatch in binary truth expression");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3603 debug_generic_expr (rhs2_type);
3604 return true;
3605 }
3606
3607 return false;
3608 }
3609
3610 case LT_EXPR:
3611 case LE_EXPR:
3612 case GT_EXPR:
3613 case GE_EXPR:
3614 case EQ_EXPR:
3615 case NE_EXPR:
3616 case UNORDERED_EXPR:
3617 case ORDERED_EXPR:
3618 case UNLT_EXPR:
3619 case UNLE_EXPR:
3620 case UNGT_EXPR:
3621 case UNGE_EXPR:
3622 case UNEQ_EXPR:
3623 case LTGT_EXPR:
3624 /* Comparisons are also binary, but the result type is not
3625 connected to the operand types. */
3626 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3627
3628 case PLUS_EXPR:
3629 case MINUS_EXPR:
3630 {
3631 if (POINTER_TYPE_P (lhs_type)
3632 || POINTER_TYPE_P (rhs1_type)
3633 || POINTER_TYPE_P (rhs2_type))
3634 {
3635 error ("invalid (pointer) operands to plus/minus");
3636 return true;
3637 }
3638
3639 /* Continue with generic binary expression handling. */
3640 break;
3641 }
3642
3643 case WIDEN_SUM_EXPR:
3644 case WIDEN_MULT_EXPR:
3645 case VEC_WIDEN_MULT_HI_EXPR:
3646 case VEC_WIDEN_MULT_LO_EXPR:
3647 case VEC_PACK_TRUNC_EXPR:
3648 case VEC_PACK_SAT_EXPR:
3649 case VEC_PACK_FIX_TRUNC_EXPR:
3650 case VEC_EXTRACT_EVEN_EXPR:
3651 case VEC_EXTRACT_ODD_EXPR:
3652 case VEC_INTERLEAVE_HIGH_EXPR:
3653 case VEC_INTERLEAVE_LOW_EXPR:
3654 /* FIXME. */
3655 return false;
3656
3657 case MULT_EXPR:
3658 case TRUNC_DIV_EXPR:
3659 case CEIL_DIV_EXPR:
3660 case FLOOR_DIV_EXPR:
3661 case ROUND_DIV_EXPR:
3662 case TRUNC_MOD_EXPR:
3663 case CEIL_MOD_EXPR:
3664 case FLOOR_MOD_EXPR:
3665 case ROUND_MOD_EXPR:
3666 case RDIV_EXPR:
3667 case EXACT_DIV_EXPR:
3668 case MIN_EXPR:
3669 case MAX_EXPR:
3670 case BIT_IOR_EXPR:
3671 case BIT_XOR_EXPR:
3672 case BIT_AND_EXPR:
3673 /* Continue with generic binary expression handling. */
3674 break;
3675
3676 default:
3677 gcc_unreachable ();
3678 }
3679
3680 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3681 || !useless_type_conversion_p (lhs_type, rhs2_type))
3682 {
3683 error ("type mismatch in binary expression");
3684 debug_generic_stmt (lhs_type);
3685 debug_generic_stmt (rhs1_type);
3686 debug_generic_stmt (rhs2_type);
3687 return true;
3688 }
3689
3690 return false;
3691 }
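
/* For instance (a hypothetical ill-formed statement): given "int x;
   long y;", an assignment "x_1 = x_2 + y_3" fails the final check
   above, because long does not trivially convert to int, and is
   diagnosed as "type mismatch in binary expression".  */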
3692
3693 /* Verify a gimple assignment statement STMT with a single rhs.
3694 Returns true if anything is wrong. */
3695
3696 static bool
3697 verify_gimple_assign_single (gimple stmt)
3698 {
3699 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3700 tree lhs = gimple_assign_lhs (stmt);
3701 tree lhs_type = TREE_TYPE (lhs);
3702 tree rhs1 = gimple_assign_rhs1 (stmt);
3703 tree rhs1_type = TREE_TYPE (rhs1);
3704 bool res = false;
3705
3706 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3707 {
3708 error ("non-trivial conversion at assignment");
3709 debug_generic_expr (lhs_type);
3710 debug_generic_expr (rhs1_type);
3711 return true;
3712 }
3713
3714 if (handled_component_p (lhs))
3715 res |= verify_types_in_gimple_reference (lhs);
3716
3717 /* Special codes we cannot handle via their class. */
3718 switch (rhs_code)
3719 {
3720 case ADDR_EXPR:
3721 {
3722 tree op = TREE_OPERAND (rhs1, 0);
3723 if (!is_gimple_addressable (op))
3724 {
3725 error ("invalid operand in unary expression");
3726 return true;
3727 }
3728
3729 if (!one_pointer_to_useless_type_conversion_p (lhs_type,
3730 TREE_TYPE (op)))
3731 {
3732 error ("type mismatch in address expression");
3733 debug_generic_stmt (lhs_type);
3734 debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
3735 return true;
3736 }
3737
3738 return verify_types_in_gimple_reference (op);
3739 }
3740
3741 /* tcc_reference */
3742 case COMPONENT_REF:
3743 case BIT_FIELD_REF:
3744 case INDIRECT_REF:
3745 case ALIGN_INDIRECT_REF:
3746 case MISALIGNED_INDIRECT_REF:
3747 case ARRAY_REF:
3748 case ARRAY_RANGE_REF:
3749 case VIEW_CONVERT_EXPR:
3750 case REALPART_EXPR:
3751 case IMAGPART_EXPR:
3752 case TARGET_MEM_REF:
3753 if (!is_gimple_reg (lhs)
3754 && is_gimple_reg_type (TREE_TYPE (lhs)))
3755 {
3756 error ("invalid rhs for gimple memory store");
3757 debug_generic_stmt (lhs);
3758 debug_generic_stmt (rhs1);
3759 return true;
3760 }
3761 return res || verify_types_in_gimple_reference (rhs1);
3762
3763 /* tcc_constant */
3764 case SSA_NAME:
3765 case INTEGER_CST:
3766 case REAL_CST:
3767 case FIXED_CST:
3768 case COMPLEX_CST:
3769 case VECTOR_CST:
3770 case STRING_CST:
3771 return res;
3772
3773 /* tcc_declaration */
3774 case CONST_DECL:
3775 return res;
3776 case VAR_DECL:
3777 case PARM_DECL:
3778 if (!is_gimple_reg (lhs)
3779 && !is_gimple_reg (rhs1)
3780 && is_gimple_reg_type (TREE_TYPE (lhs)))
3781 {
3782 error ("invalid rhs for gimple memory store");
3783 debug_generic_stmt (lhs);
3784 debug_generic_stmt (rhs1);
3785 return true;
3786 }
3787 return res;
3788
3789 case COND_EXPR:
3790 case CONSTRUCTOR:
3791 case OBJ_TYPE_REF:
3792 case ASSERT_EXPR:
3793 case WITH_SIZE_EXPR:
3794 case EXC_PTR_EXPR:
3795 case FILTER_EXPR:
3796 case POLYNOMIAL_CHREC:
3797 case DOT_PROD_EXPR:
3798 case VEC_COND_EXPR:
3799 case REALIGN_LOAD_EXPR:
3800 /* FIXME. */
3801 return res;
3802
3803 default:;
3804 }
3805
3806 return res;
3807 }
3808
3809 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3810 is a problem, otherwise false. */
3811
3812 static bool
3813 verify_gimple_assign (gimple stmt)
3814 {
3815 switch (gimple_assign_rhs_class (stmt))
3816 {
3817 case GIMPLE_SINGLE_RHS:
3818 return verify_gimple_assign_single (stmt);
3819
3820 case GIMPLE_UNARY_RHS:
3821 return verify_gimple_assign_unary (stmt);
3822
3823 case GIMPLE_BINARY_RHS:
3824 return verify_gimple_assign_binary (stmt);
3825
3826 default:
3827 gcc_unreachable ();
3828 }
3829 }
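
/* For example (illustrative statements): "x_1 = y_2" is checked as
   GIMPLE_SINGLE_RHS, "x_1 = -y_2" as GIMPLE_UNARY_RHS, and
   "x_1 = y_2 + z_3" as GIMPLE_BINARY_RHS.  */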
3830
3831 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3832 is a problem, otherwise false. */
3833
3834 static bool
3835 verify_gimple_return (gimple stmt)
3836 {
3837 tree op = gimple_return_retval (stmt);
3838 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3839
3840 /* We cannot test for present return values as we do not fix up missing
3841 return values from the original source. */
3842 if (op == NULL)
3843 return false;
3844
3845 if (!is_gimple_val (op)
3846 && TREE_CODE (op) != RESULT_DECL)
3847 {
3848 error ("invalid operand in return statement");
3849 debug_generic_stmt (op);
3850 return true;
3851 }
3852
3853 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3854 /* ??? With C++ we can have the situation that the result
3855 decl is a reference type while the return type is an aggregate. */
3856 && !(TREE_CODE (op) == RESULT_DECL
3857 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3858 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3859 {
3860 error ("invalid conversion in return statement");
3861 debug_generic_stmt (restype);
3862 debug_generic_stmt (TREE_TYPE (op));
3863 return true;
3864 }
3865
3866 return false;
3867 }
3868
3869
3870 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3871 is a problem, otherwise false. */
3872
3873 static bool
3874 verify_gimple_goto (gimple stmt)
3875 {
3876 tree dest = gimple_goto_dest (stmt);
3877
3878 /* ??? We have two canonical forms of direct goto destinations, a
3879 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3880 if (TREE_CODE (dest) != LABEL_DECL
3881 && (!is_gimple_val (dest)
3882 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3883 {
3884 error ("goto destination is neither a label nor a pointer");
3885 return true;
3886 }
3887
3888 return false;
3889 }
3890
3891 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3892 is a problem, otherwise false. */
3893
3894 static bool
3895 verify_gimple_switch (gimple stmt)
3896 {
3897 if (!is_gimple_val (gimple_switch_index (stmt)))
3898 {
3899 error ("invalid operand to switch statement");
3900 debug_generic_stmt (gimple_switch_index (stmt));
3901 return true;
3902 }
3903
3904 return false;
3905 }
3906
3907
3908 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3909 and false otherwise. */
3910
3911 static bool
3912 verify_gimple_phi (gimple stmt)
3913 {
3914 tree type = TREE_TYPE (gimple_phi_result (stmt));
3915 unsigned i;
3916
3917 if (!is_gimple_variable (gimple_phi_result (stmt)))
3918 {
3919 error ("Invalid PHI result");
3920 return true;
3921 }
3922
3923 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3924 {
3925 tree arg = gimple_phi_arg_def (stmt, i);
3926 if ((is_gimple_reg (gimple_phi_result (stmt))
3927 && !is_gimple_val (arg))
3928 || (!is_gimple_reg (gimple_phi_result (stmt))
3929 && !is_gimple_addressable (arg)))
3930 {
3931 error ("Invalid PHI argument");
3932 debug_generic_stmt (arg);
3933 return true;
3934 }
3935 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3936 {
3937 error ("Incompatible types in PHI argument %u", i);
3938 debug_generic_stmt (type);
3939 debug_generic_stmt (TREE_TYPE (arg));
3940 return true;
3941 }
3942 }
3943
3944 return false;
3945 }
3946
3947
3948 /* Verify the GIMPLE statement STMT. Returns true if there is an
3949 error, otherwise false. */
3950
3951 static bool
3952 verify_types_in_gimple_stmt (gimple stmt)
3953 {
3954 if (is_gimple_omp (stmt))
3955 {
3956 /* OpenMP directives are validated by the FE and never operated
3957 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3958 non-gimple expressions when the main index variable has had
3959 its address taken. This does not affect the loop itself
3960 because the header of a GIMPLE_OMP_FOR is merely used to determine
3961 how to set up the parallel iteration. */
3962 return false;
3963 }
3964
3965 switch (gimple_code (stmt))
3966 {
3967 case GIMPLE_ASSIGN:
3968 return verify_gimple_assign (stmt);
3969
3970 case GIMPLE_LABEL:
3971 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3972
3973 case GIMPLE_CALL:
3974 return verify_gimple_call (stmt);
3975
3976 case GIMPLE_COND:
3977 return verify_gimple_comparison (boolean_type_node,
3978 gimple_cond_lhs (stmt),
3979 gimple_cond_rhs (stmt));
3980
3981 case GIMPLE_GOTO:
3982 return verify_gimple_goto (stmt);
3983
3984 case GIMPLE_SWITCH:
3985 return verify_gimple_switch (stmt);
3986
3987 case GIMPLE_RETURN:
3988 return verify_gimple_return (stmt);
3989
3990 case GIMPLE_ASM:
3991 return false;
3992
3993 case GIMPLE_CHANGE_DYNAMIC_TYPE:
3994 return (!is_gimple_val (gimple_cdt_location (stmt))
3995 || !POINTER_TYPE_P (TREE_TYPE (gimple_cdt_location (stmt))));
3996
3997 case GIMPLE_PHI:
3998 return verify_gimple_phi (stmt);
3999
4000 /* Tuples that do not have tree operands. */
4001 case GIMPLE_NOP:
4002 case GIMPLE_RESX:
4003 case GIMPLE_PREDICT:
4004 return false;
4005
4006 default:
4007 gcc_unreachable ();
4008 }
4009 }
4010
4011 /* Verify the GIMPLE statements inside the sequence STMTS. */
4012
4013 static bool
4014 verify_types_in_gimple_seq_2 (gimple_seq stmts)
4015 {
4016 gimple_stmt_iterator ittr;
4017 bool err = false;
4018
4019 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4020 {
4021 gimple stmt = gsi_stmt (ittr);
4022
4023 switch (gimple_code (stmt))
4024 {
4025 case GIMPLE_BIND:
4026 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
4027 break;
4028
4029 case GIMPLE_TRY:
4030 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
4031 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
4032 break;
4033
4034 case GIMPLE_EH_FILTER:
4035 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
4036 break;
4037
4038 case GIMPLE_CATCH:
4039 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
4040 break;
4041
4042 default:
4043 {
4044 bool err2 = verify_types_in_gimple_stmt (stmt);
4045 if (err2)
4046 debug_gimple_stmt (stmt);
4047 err |= err2;
4048 }
4049 }
4050 }
4051
4052 return err;
4053 }
4054
4055
4056 /* Verify the GIMPLE statements inside the statement list STMTS. */
4057
4058 void
4059 verify_types_in_gimple_seq (gimple_seq stmts)
4060 {
4061 if (verify_types_in_gimple_seq_2 (stmts))
4062 internal_error ("verify_gimple failed");
4063 }
4064
4065
4066 /* Verify the statement pointed to by GSI. Return true if the
4067 statement is not in GIMPLE form. TODO: Implement type checking. */
4068
4069 static bool
4070 verify_stmt (gimple_stmt_iterator *gsi)
4071 {
4072 tree addr;
4073 struct walk_stmt_info wi;
4074 bool last_in_block = gsi_one_before_end_p (*gsi);
4075 gimple stmt = gsi_stmt (*gsi);
4076
4077 if (is_gimple_omp (stmt))
4078 {
4079 /* OpenMP directives are validated by the FE and never operated
4080 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4081 non-gimple expressions when the main index variable has had
4082 its address taken. This does not affect the loop itself
4083 because the header of a GIMPLE_OMP_FOR is merely used to determine
4084 how to set up the parallel iteration. */
4085 return false;
4086 }
4087
4088 /* FIXME. The C frontend passes unpromoted arguments in case it
4089 didn't see a function declaration before the call. */
4090 if (is_gimple_call (stmt))
4091 {
4092 tree decl;
4093
4094 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4095 {
4096 error ("invalid function in call statement");
4097 return true;
4098 }
4099
4100 decl = gimple_call_fndecl (stmt);
4101 if (decl
4102 && TREE_CODE (decl) == FUNCTION_DECL
4103 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4104 && (!DECL_PURE_P (decl))
4105 && (!TREE_READONLY (decl)))
4106 {
4107 error ("invalid pure const state for function");
4108 return true;
4109 }
4110 }
4111
4112 memset (&wi, 0, sizeof (wi));
4113 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4114 if (addr)
4115 {
4116 debug_generic_expr (addr);
4117 inform (input_location, "in statement");
4118 debug_gimple_stmt (stmt);
4119 return true;
4120 }
4121
4122 /* If the statement is marked as part of an EH region, then it is
4123 expected that the statement could throw. Verify that when
4124 optimizations simplify a statement to the point where we can
4125 prove it cannot throw, the other data structures are updated
4126 to match. */
4127 if (lookup_stmt_eh_region (stmt) >= 0)
4128 {
4129 if (!stmt_could_throw_p (stmt))
4130 {
4131 error ("statement marked for throw, but doesn%'t");
4132 goto fail;
4133 }
4134 if (!last_in_block && stmt_can_throw_internal (stmt))
4135 {
4136 error ("statement marked for throw in middle of block");
4137 goto fail;
4138 }
4139 }
4140
4141 return false;
4142
4143 fail:
4144 debug_gimple_stmt (stmt);
4145 return true;
4146 }
4147
4148
4149 /* Return true when T can be shared. */
4150
4151 static bool
4152 tree_node_can_be_shared (tree t)
4153 {
4154 if (IS_TYPE_OR_DECL_P (t)
4155 || is_gimple_min_invariant (t)
4156 || TREE_CODE (t) == SSA_NAME
4157 || t == error_mark_node
4158 || TREE_CODE (t) == IDENTIFIER_NODE)
4159 return true;
4160
4161 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4162 return true;
4163
4164 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4165 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4166 || TREE_CODE (t) == COMPONENT_REF
4167 || TREE_CODE (t) == REALPART_EXPR
4168 || TREE_CODE (t) == IMAGPART_EXPR)
4169 t = TREE_OPERAND (t, 0);
4170
4171 if (DECL_P (t))
4172 return true;
4173
4174 return false;
4175 }
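
/* For example (a sketch, not taken from the original sources): types,
   decls, SSA names and constants may be shared freely; a handled
   reference such as a.b[1].c peels down to the decl "a" and is
   likewise shareable, whereas a[i_1] with a non-invariant index is
   not, and must be unshared.  */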
4176
4177
4178 /* Called via walk_gimple_stmt. Verify tree sharing. */
4179
4180 static tree
4181 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4182 {
4183 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4184 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4185
4186 if (tree_node_can_be_shared (*tp))
4187 {
4188 *walk_subtrees = false;
4189 return NULL;
4190 }
4191
4192 if (pointer_set_insert (visited, *tp))
4193 return *tp;
4194
4195 return NULL;
4196 }
4197
4198
4199 static bool eh_error_found;
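
/* Helper for verify_stmts. Called via htab_traverse over the EH throw
   statement table; complains about entries whose statements were not
   visited while walking the IL, i.e. dead entries. */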
4200 static int
4201 verify_eh_throw_stmt_node (void **slot, void *data)
4202 {
4203 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4204 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4205
4206 if (!pointer_set_contains (visited, node->stmt))
4207 {
4208 error ("Dead STMT in EH table");
4209 debug_gimple_stmt (node->stmt);
4210 eh_error_found = true;
4211 }
4212 return 1;
4213 }
4214
4215
4216 /* Verify the GIMPLE statements in every basic block. */
4217
4218 void
4219 verify_stmts (void)
4220 {
4221 basic_block bb;
4222 gimple_stmt_iterator gsi;
4223 bool err = false;
4224 struct pointer_set_t *visited, *visited_stmts;
4225 tree addr;
4226 struct walk_stmt_info wi;
4227
4228 timevar_push (TV_TREE_STMT_VERIFY);
4229 visited = pointer_set_create ();
4230 visited_stmts = pointer_set_create ();
4231
4232 memset (&wi, 0, sizeof (wi));
4233 wi.info = (void *) visited;
4234
4235 FOR_EACH_BB (bb)
4236 {
4237 gimple phi;
4238 size_t i;
4239
4240 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4241 {
4242 phi = gsi_stmt (gsi);
4243 pointer_set_insert (visited_stmts, phi);
4244 if (gimple_bb (phi) != bb)
4245 {
4246 error ("gimple_bb (phi) is set to a wrong basic block");
4247 err |= true;
4248 }
4249
4250 for (i = 0; i < gimple_phi_num_args (phi); i++)
4251 {
4252 tree t = gimple_phi_arg_def (phi, i);
4253 tree addr;
4254
4255 if (!t)
4256 {
4257 error ("missing PHI def");
4258 debug_gimple_stmt (phi);
4259 err |= true;
4260 continue;
4261 }
4262 /* Addressable variables do have SSA_NAMEs but they
4263 are not considered gimple values. */
4264 else if (TREE_CODE (t) != SSA_NAME
4265 && TREE_CODE (t) != FUNCTION_DECL
4266 && !is_gimple_min_invariant (t))
4267 {
4268 error ("PHI argument is not a GIMPLE value");
4269 debug_gimple_stmt (phi);
4270 debug_generic_expr (t);
4271 err |= true;
4272 }
4273
4274 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4275 if (addr)
4276 {
4277 error ("incorrect sharing of tree nodes");
4278 debug_gimple_stmt (phi);
4279 debug_generic_expr (addr);
4280 err |= true;
4281 }
4282 }
4283 }
4284
4285 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4286 {
4287 gimple stmt = gsi_stmt (gsi);
4288
4289 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4290 || gimple_code (stmt) == GIMPLE_BIND)
4291 {
4292 error ("invalid GIMPLE statement");
4293 debug_gimple_stmt (stmt);
4294 err |= true;
4295 }
4296
4297 pointer_set_insert (visited_stmts, stmt);
4298
4299 if (gimple_bb (stmt) != bb)
4300 {
4301 error ("gimple_bb (stmt) is set to a wrong basic block");
4302 err |= true;
4303 }
4304
4305 if (gimple_code (stmt) == GIMPLE_LABEL)
4306 {
4307 tree decl = gimple_label_label (stmt);
4308 int uid = LABEL_DECL_UID (decl);
4309
4310 if (uid == -1
4311 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4312 {
4313 error ("incorrect entry in label_to_block_map");
4314 err |= true;
4315 }
4316 }
4317
4318 err |= verify_stmt (&gsi);
4319 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4320 if (addr)
4321 {
4322 error ("incorrect sharing of tree nodes");
4323 debug_gimple_stmt (stmt);
4324 debug_generic_expr (addr);
4325 err |= true;
4326 }
4327 gsi_next (&gsi);
4328 }
4329 }
4330
4331 eh_error_found = false;
4332 if (get_eh_throw_stmt_table (cfun))
4333 htab_traverse (get_eh_throw_stmt_table (cfun),
4334 verify_eh_throw_stmt_node,
4335 visited_stmts);
4336
4337 if (err | eh_error_found)
4338 internal_error ("verify_stmts failed");
4339
4340 pointer_set_destroy (visited);
4341 pointer_set_destroy (visited_stmts);
4342 verify_histograms ();
4343 timevar_pop (TV_TREE_STMT_VERIFY);
4344 }
4345
4346
4347 /* Verifies that the flow information is OK. */
4348
4349 static int
4350 gimple_verify_flow_info (void)
4351 {
4352 int err = 0;
4353 basic_block bb;
4354 gimple_stmt_iterator gsi;
4355 gimple stmt;
4356 edge e;
4357 edge_iterator ei;
4358
4359 if (ENTRY_BLOCK_PTR->il.gimple)
4360 {
4361 error ("ENTRY_BLOCK has IL associated with it");
4362 err = 1;
4363 }
4364
4365 if (EXIT_BLOCK_PTR->il.gimple)
4366 {
4367 error ("EXIT_BLOCK has IL associated with it");
4368 err = 1;
4369 }
4370
4371 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4372 if (e->flags & EDGE_FALLTHRU)
4373 {
4374 error ("fallthru to exit from bb %d", e->src->index);
4375 err = 1;
4376 }
4377
4378 FOR_EACH_BB (bb)
4379 {
4380 bool found_ctrl_stmt = false;
4381
4382 stmt = NULL;
4383
4384 /* Skip the labels at the start of the basic block. */
4385 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4386 {
4387 tree label;
4388 gimple prev_stmt = stmt;
4389
4390 stmt = gsi_stmt (gsi);
4391
4392 if (gimple_code (stmt) != GIMPLE_LABEL)
4393 break;
4394
4395 label = gimple_label_label (stmt);
4396 if (prev_stmt && DECL_NONLOCAL (label))
4397 {
4398 error ("nonlocal label ");
4399 print_generic_expr (stderr, label, 0);
4400 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4401 bb->index);
4402 err = 1;
4403 }
4404
4405 if (label_to_block (label) != bb)
4406 {
4407 error ("label ");
4408 print_generic_expr (stderr, label, 0);
4409 fprintf (stderr, " to block does not match in bb %d",
4410 bb->index);
4411 err = 1;
4412 }
4413
4414 if (decl_function_context (label) != current_function_decl)
4415 {
4416 error ("label ");
4417 print_generic_expr (stderr, label, 0);
4418 fprintf (stderr, " has incorrect context in bb %d",
4419 bb->index);
4420 err = 1;
4421 }
4422 }
4423
4424 /* Verify that the body of basic block BB is free of control flow. */
4425 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4426 {
4427 gimple stmt = gsi_stmt (gsi);
4428
4429 if (found_ctrl_stmt)
4430 {
4431 error ("control flow in the middle of basic block %d",
4432 bb->index);
4433 err = 1;
4434 }
4435
4436 if (stmt_ends_bb_p (stmt))
4437 found_ctrl_stmt = true;
4438
4439 if (gimple_code (stmt) == GIMPLE_LABEL)
4440 {
4441 error ("label ");
4442 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4443 fprintf (stderr, " in the middle of basic block %d", bb->index);
4444 err = 1;
4445 }
4446 }
4447
4448 gsi = gsi_last_bb (bb);
4449 if (gsi_end_p (gsi))
4450 continue;
4451
4452 stmt = gsi_stmt (gsi);
4453
4454 err |= verify_eh_edges (stmt);
4455
4456 if (is_ctrl_stmt (stmt))
4457 {
4458 FOR_EACH_EDGE (e, ei, bb->succs)
4459 if (e->flags & EDGE_FALLTHRU)
4460 {
4461 error ("fallthru edge after a control statement in bb %d",
4462 bb->index);
4463 err = 1;
4464 }
4465 }
4466
4467 if (gimple_code (stmt) != GIMPLE_COND)
4468 {
4469 /* Verify that no edges have EDGE_TRUE_VALUE/EDGE_FALSE_VALUE set
4470 after anything other than a GIMPLE_COND statement. */
4471 FOR_EACH_EDGE (e, ei, bb->succs)
4472 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4473 {
4474 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4475 bb->index);
4476 err = 1;
4477 }
4478 }
4479
4480 switch (gimple_code (stmt))
4481 {
4482 case GIMPLE_COND:
4483 {
4484 edge true_edge;
4485 edge false_edge;
4486
4487 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4488
4489 if (!true_edge
4490 || !false_edge
4491 || !(true_edge->flags & EDGE_TRUE_VALUE)
4492 || !(false_edge->flags & EDGE_FALSE_VALUE)
4493 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4494 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4495 || EDGE_COUNT (bb->succs) >= 3)
4496 {
4497 error ("wrong outgoing edge flags at end of bb %d",
4498 bb->index);
4499 err = 1;
4500 }
4501 }
4502 break;
4503
4504 case GIMPLE_GOTO:
4505 if (simple_goto_p (stmt))
4506 {
4507 error ("explicit goto at end of bb %d", bb->index);
4508 err = 1;
4509 }
4510 else
4511 {
4512 /* FIXME. We should double check that the labels in the
4513 destination blocks have their address taken. */
4514 FOR_EACH_EDGE (e, ei, bb->succs)
4515 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4516 | EDGE_FALSE_VALUE))
4517 || !(e->flags & EDGE_ABNORMAL))
4518 {
4519 error ("wrong outgoing edge flags at end of bb %d",
4520 bb->index);
4521 err = 1;
4522 }
4523 }
4524 break;
4525
4526 case GIMPLE_RETURN:
4527 if (!single_succ_p (bb)
4528 || (single_succ_edge (bb)->flags
4529 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4530 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4531 {
4532 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4533 err = 1;
4534 }
4535 if (single_succ (bb) != EXIT_BLOCK_PTR)
4536 {
4537 error ("return edge does not point to exit in bb %d",
4538 bb->index);
4539 err = 1;
4540 }
4541 break;
4542
4543 case GIMPLE_SWITCH:
4544 {
4545 tree prev;
4546 edge e;
4547 size_t i, n;
4548
4549 n = gimple_switch_num_labels (stmt);
4550
4551 /* Mark all the destination basic blocks; AUX == 1 marks a case target. */
4552 for (i = 0; i < n; ++i)
4553 {
4554 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4555 basic_block label_bb = label_to_block (lab);
4556 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4557 label_bb->aux = (void *)1;
4558 }
4559
4560 /* Verify that the case labels are sorted. */
4561 prev = gimple_switch_label (stmt, 0);
4562 for (i = 1; i < n; ++i)
4563 {
4564 tree c = gimple_switch_label (stmt, i);
4565 if (!CASE_LOW (c))
4566 {
4567 error ("found default case not at the start of "
4568 "case vector");
4569 err = 1;
4570 continue;
4571 }
4572 if (CASE_LOW (prev)
4573 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4574 {
4575 error ("case labels not sorted: ");
4576 print_generic_expr (stderr, prev, 0);
4577 fprintf (stderr," is greater than ");
4578 print_generic_expr (stderr, c, 0);
4579 fprintf (stderr," but comes before it.\n");
4580 err = 1;
4581 }
4582 prev = c;
4583 }
4584 /* VRP will remove the default case if it can prove it will
4585 never be executed. So do not verify there always exists
4586 a default case here. */
4587
4588 FOR_EACH_EDGE (e, ei, bb->succs)
4589 {
4590 if (!e->dest->aux)
4591 {
4592 error ("extra outgoing edge %d->%d",
4593 bb->index, e->dest->index);
4594 err = 1;
4595 }
4596
4597 e->dest->aux = (void *)2;
4598 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4599 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4600 {
4601 error ("wrong outgoing edge flags at end of bb %d",
4602 bb->index);
4603 err = 1;
4604 }
4605 }
4606
4607 /* Check that every case label found a matching outgoing edge. */
4608 for (i = 0; i < n; ++i)
4609 {
4610 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4611 basic_block label_bb = label_to_block (lab);
4612
4613 if (label_bb->aux != (void *)2)
4614 {
4615 error ("missing edge %i->%i", bb->index, label_bb->index);
4616 err = 1;
4617 }
4618 }
4619
4620 FOR_EACH_EDGE (e, ei, bb->succs)
4621 e->dest->aux = (void *)0;
4622 }
4623
4624 default: ;
4625 }
4626 }
4627
4628 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4629 verify_dominators (CDI_DOMINATORS);
4630
4631 return err;
4632 }
4633
4634
4635 /* Updates phi nodes after creating a forwarder block joined
4636 by edge FALLTHRU. */
4637
4638 static void
4639 gimple_make_forwarder_block (edge fallthru)
4640 {
4641 edge e;
4642 edge_iterator ei;
4643 basic_block dummy, bb;
4644 tree var;
4645 gimple_stmt_iterator gsi;
4646
4647 dummy = fallthru->src;
4648 bb = fallthru->dest;
4649
4650 if (single_pred_p (bb))
4651 return;
4652
4653 /* If we redirected a branch we must create new PHI nodes at the
4654 start of BB. */
4655 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4656 {
4657 gimple phi, new_phi;
4658
4659 phi = gsi_stmt (gsi);
4660 var = gimple_phi_result (phi);
4661 new_phi = create_phi_node (var, bb);
4662 SSA_NAME_DEF_STMT (var) = new_phi;
4663 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4664 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
4665 }
4666
4667 /* Add the arguments we have stored on edges. */
4668 FOR_EACH_EDGE (e, ei, bb->preds)
4669 {
4670 if (e == fallthru)
4671 continue;
4672
4673 flush_pending_stmts (e);
4674 }
4675 }
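
/* A sketch of the update above (hypothetical SSA names): after DUMMY
   has been split off BB, each PHI stays in DUMMY under a fresh result,
   and a new PHI in BB receives that result along FALLTHRU:

     DUMMY: # x_2 = PHI <...original arguments...>
     BB:    # x_1 = PHI <x_2(FALLTHRU), ...args flushed from edges...>  */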
4676
4677
4678 /* Return a non-special label at the head of basic block BB.
4679 Create one if it doesn't exist. */
4680
4681 tree
4682 gimple_block_label (basic_block bb)
4683 {
4684 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4685 bool first = true;
4686 tree label;
4687 gimple stmt;
4688
4689 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4690 {
4691 stmt = gsi_stmt (i);
4692 if (gimple_code (stmt) != GIMPLE_LABEL)
4693 break;
4694 label = gimple_label_label (stmt);
4695 if (!DECL_NONLOCAL (label))
4696 {
4697 if (!first)
4698 gsi_move_before (&i, &s);
4699 return label;
4700 }
4701 }
4702
4703 label = create_artificial_label ();
4704 stmt = gimple_build_label (label);
4705 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4706 return label;
4707 }
4708
4709
4710 /* Attempt to perform edge redirection by replacing a possibly complex
4711 jump instruction by a goto or by removing the jump completely.
4712 This can apply only if all edges now point to the same block. The
4713 parameters and return values are equivalent to
4714 redirect_edge_and_branch. */
4715
4716 static edge
4717 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4718 {
4719 basic_block src = e->src;
4720 gimple_stmt_iterator i;
4721 gimple stmt;
4722
4723 /* We can replace or remove a complex jump only when we have exactly
4724 two edges. */
4725 if (EDGE_COUNT (src->succs) != 2
4726 /* Verify that all targets will be TARGET. Specifically, the
4727 edge that is not E must also go to TARGET. */
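/* (The index expression "EDGE_SUCC (src, 0) == e" evaluates to 1
   exactly when edge 0 is E, so it selects SRC's other successor
   edge.) */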
4728 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4729 return NULL;
4730
4731 i = gsi_last_bb (src);
4732 if (gsi_end_p (i))
4733 return NULL;
4734
4735 stmt = gsi_stmt (i);
4736
4737 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4738 {
4739 gsi_remove (&i, true);
4740 e = ssa_redirect_edge (e, target);
4741 e->flags = EDGE_FALLTHRU;
4742 return e;
4743 }
4744
4745 return NULL;
4746 }
4747
4748
4749 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4750 edge representing the redirected branch. */
4751
4752 static edge
4753 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4754 {
4755 basic_block bb = e->src;
4756 gimple_stmt_iterator gsi;
4757 edge ret;
4758 gimple stmt;
4759
4760 if (e->flags & EDGE_ABNORMAL)
4761 return NULL;
4762
4763 if (e->src != ENTRY_BLOCK_PTR
4764 && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
4765 return ret;
4766
4767 if (e->dest == dest)
4768 return NULL;
4769
4770 gsi = gsi_last_bb (bb);
4771 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4772
4773 switch (stmt ? gimple_code (stmt) : ERROR_MARK)
4774 {
4775 case GIMPLE_COND:
4776 /* For a GIMPLE_COND, we only need to redirect the edge. */
4777 break;
4778
4779 case GIMPLE_GOTO:
4780 /* No non-abnormal edges should lead from a non-simple goto, and
4781 simple ones should be represented implicitly. */
4782 gcc_unreachable ();
4783
4784 case GIMPLE_SWITCH:
4785 {
4786 tree label = gimple_block_label (dest);
4787 tree cases = get_cases_for_edge (e, stmt);
4788
4789 /* If we have a list of cases associated with E, then use it
4790 as it's a lot faster than walking the entire case vector. */
4791 if (cases)
4792 {
4793 edge e2 = find_edge (e->src, dest);
4794 tree last, first;
4795
4796 first = cases;
4797 while (cases)
4798 {
4799 last = cases;
4800 CASE_LABEL (cases) = label;
4801 cases = TREE_CHAIN (cases);
4802 }
4803
4804 /* If there was already an edge in the CFG, then we need
4805 to move all the cases associated with E to E2. */
4806 if (e2)
4807 {
4808 tree cases2 = get_cases_for_edge (e2, stmt);
4809
4810 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4811 TREE_CHAIN (cases2) = first;
4812 }
4813 }
4814 else
4815 {
4816 size_t i, n = gimple_switch_num_labels (stmt);
4817
4818 for (i = 0; i < n; i++)
4819 {
4820 tree elt = gimple_switch_label (stmt, i);
4821 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4822 CASE_LABEL (elt) = label;
4823 }
4824 }
4825
4826 break;
4827 }
4828
4829 case GIMPLE_RETURN:
4830 gsi_remove (&gsi, true);
4831 e->flags |= EDGE_FALLTHRU;
4832 break;
4833
4834 case GIMPLE_OMP_RETURN:
4835 case GIMPLE_OMP_CONTINUE:
4836 case GIMPLE_OMP_SECTIONS_SWITCH:
4837 case GIMPLE_OMP_FOR:
4838 /* The edges from OMP constructs can be simply redirected. */
4839 break;
4840
4841 default:
4842 /* Otherwise it must be a fallthru edge, and we don't need to
4843 do anything besides redirecting it. */
4844 gcc_assert (e->flags & EDGE_FALLTHRU);
4845 break;
4846 }
4847
4848 /* Update/insert PHI nodes as necessary. */
4849
4850 /* Now update the edges in the CFG. */
4851 e = ssa_redirect_edge (e, dest);
4852
4853 return e;
4854 }
4855
4856 /* Returns true if it is possible to remove edge E by redirecting
4857 it to the destination of the other edge from E->src. */
4858
4859 static bool
4860 gimple_can_remove_branch_p (const_edge e)
4861 {
4862 if (e->flags & EDGE_ABNORMAL)
4863 return false;
4864
4865 return true;
4866 }
4867
4868 /* Simple wrapper, as we can always redirect fallthru edges. */
4869
4870 static basic_block
4871 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4872 {
4873 e = gimple_redirect_edge_and_branch (e, dest);
4874 gcc_assert (e);
4875
4876 return NULL;
4877 }
4878
4879
4880 /* Splits basic block BB after statement STMT (but at least after the
4881 labels). If STMT is NULL, BB is split just after the labels. */
4882
4883 static basic_block
4884 gimple_split_block (basic_block bb, void *stmt)
4885 {
4886 gimple_stmt_iterator gsi;
4887 gimple_stmt_iterator gsi_tgt;
4888 gimple act;
4889 gimple_seq list;
4890 basic_block new_bb;
4891 edge e;
4892 edge_iterator ei;
4893
4894 new_bb = create_empty_bb (bb);
4895
4896 /* Redirect the outgoing edges. */
4897 new_bb->succs = bb->succs;
4898 bb->succs = NULL;
4899 FOR_EACH_EDGE (e, ei, new_bb->succs)
4900 e->src = new_bb;
4901
4902 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4903 stmt = NULL;
4904
4905 /* Move everything from GSI to the new basic block. */
4906 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4907 {
4908 act = gsi_stmt (gsi);
4909 if (gimple_code (act) == GIMPLE_LABEL)
4910 continue;
4911
4912 if (!stmt)
4913 break;
4914
4915 if (stmt == act)
4916 {
4917 gsi_next (&gsi);
4918 break;
4919 }
4920 }
4921
4922 if (gsi_end_p (gsi))
4923 return new_bb;
4924
4925 /* Split the statement list - avoid re-creating new containers as this
4926 brings ugly quadratic memory consumption in the inliner.
4927 (We are still quadratic since we need to update stmt BB pointers,
4928 sadly.) */
4929 list = gsi_split_seq_before (&gsi);
4930 set_bb_seq (new_bb, list);
4931 for (gsi_tgt = gsi_start (list);
4932 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4933 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4934
4935 return new_bb;
4936 }
4937
4938
4939 /* Moves basic block BB after block AFTER. */
4940
4941 static bool
4942 gimple_move_block_after (basic_block bb, basic_block after)
4943 {
4944 if (bb->prev_bb == after)
4945 return true;
4946
4947 unlink_block (bb);
4948 link_block (bb, after);
4949
4950 return true;
4951 }
4952
4953
4954 /* Return true if basic block BB can be duplicated. */
4955
4956 static bool
4957 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4958 {
4959 return true;
4960 }
4961
4962 /* Create a duplicate of the basic block BB. NOTE: This does not
4963 preserve SSA form. */
4964
4965 static basic_block
4966 gimple_duplicate_bb (basic_block bb)
4967 {
4968 basic_block new_bb;
4969 gimple_stmt_iterator gsi, gsi_tgt;
4970 gimple_seq phis = phi_nodes (bb);
4971 gimple phi, stmt, copy;
4972
4973 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
4974
4975 /* Copy the PHI nodes. We ignore PHI node arguments here because
4976 the incoming edges have not been set up yet. */
4977 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
4978 {
4979 phi = gsi_stmt (gsi);
4980 copy = create_phi_node (gimple_phi_result (phi), new_bb);
4981 create_new_def_for (gimple_phi_result (copy), copy,
4982 gimple_phi_result_ptr (copy));
4983 }
4984
4985 gsi_tgt = gsi_start_bb (new_bb);
4986 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4987 {
4988 def_operand_p def_p;
4989 ssa_op_iter op_iter;
4990 int region;
4991
4992 stmt = gsi_stmt (gsi);
4993 if (gimple_code (stmt) == GIMPLE_LABEL)
4994 continue;
4995
4996 /* Create a new copy of STMT and duplicate STMT's virtual
4997 operands. */
4998 copy = gimple_copy (stmt);
4999 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5000 region = lookup_stmt_eh_region (stmt);
5001 if (region >= 0)
5002 add_stmt_to_eh_region (copy, region);
5003 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5004
5005 /* Create new names for all the definitions created by COPY and
5006 add replacement mappings for each new name. */
5007 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5008 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5009 }
5010
5011 return new_bb;
5012 }
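
/* Note: the copy produced above has no incoming edges and no PHI
   arguments yet; callers are expected to create the edges and then
   fill the arguments in, e.g. via add_phi_args_after_copy_bb below. */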
5013
5014 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5015
5016 static void
5017 add_phi_args_after_copy_edge (edge e_copy)
5018 {
5019 basic_block bb, bb_copy = e_copy->src, dest;
5020 edge e;
5021 edge_iterator ei;
5022 gimple phi, phi_copy;
5023 tree def;
5024 gimple_stmt_iterator psi, psi_copy;
5025
5026 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5027 return;
5028
5029 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5030
5031 if (e_copy->dest->flags & BB_DUPLICATED)
5032 dest = get_bb_original (e_copy->dest);
5033 else
5034 dest = e_copy->dest;
5035
5036 e = find_edge (bb, dest);
5037 if (!e)
5038 {
5039 /* During loop unrolling the target of the latch edge is copied.
5040 In this case we are not looking for the edge to DEST, but for
5041 the edge to the duplicated block whose original was DEST. */
5042 FOR_EACH_EDGE (e, ei, bb->succs)
5043 {
5044 if ((e->dest->flags & BB_DUPLICATED)
5045 && get_bb_original (e->dest) == dest)
5046 break;
5047 }
5048
5049 gcc_assert (e != NULL);
5050 }
5051
5052 for (psi = gsi_start_phis (e->dest),
5053 psi_copy = gsi_start_phis (e_copy->dest);
5054 !gsi_end_p (psi);
5055 gsi_next (&psi), gsi_next (&psi_copy))
5056 {
5057 phi = gsi_stmt (psi);
5058 phi_copy = gsi_stmt (psi_copy);
5059 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5060 add_phi_arg (phi_copy, def, e_copy);
5061 }
5062 }
5063
5064
5065 /* Basic block BB_COPY was created by code duplication. Add phi node
5066 arguments for edges going out of BB_COPY. The blocks that were
5067 duplicated have BB_DUPLICATED set. */
5068
5069 void
5070 add_phi_args_after_copy_bb (basic_block bb_copy)
5071 {
5072 edge e_copy;
5073 edge_iterator ei;
5074
5075 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5076 {
5077 add_phi_args_after_copy_edge (e_copy);
5078 }
5079 }
5080
5081 /* Blocks in REGION_COPY array of length N_REGION were created by
5082 duplication of basic blocks. Add phi node arguments for edges
5083 going from these blocks. If E_COPY is not NULL, also add
5084 phi node arguments for its destination. */
5085
5086 void
5087 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5088 edge e_copy)
5089 {
5090 unsigned i;
5091
5092 for (i = 0; i < n_region; i++)
5093 region_copy[i]->flags |= BB_DUPLICATED;
5094
5095 for (i = 0; i < n_region; i++)
5096 add_phi_args_after_copy_bb (region_copy[i]);
5097 if (e_copy)
5098 add_phi_args_after_copy_edge (e_copy);
5099
5100 for (i = 0; i < n_region; i++)
5101 region_copy[i]->flags &= ~BB_DUPLICATED;
5102 }
5103
5104 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5105 important exit edge EXIT. By important we mean that no SSA name defined
5106 inside region is live over the other exit edges of the region. All entry
5107 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5108 to the duplicate of the region. SSA form, dominance and loop information
5109 is updated. The new basic blocks are stored to REGION_COPY in the same
5110 order as they had in REGION, provided that REGION_COPY is not NULL.
5111 The function returns false if it is unable to copy the region,
5112 true otherwise. */
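
/* An illustrative sketch of the primary use, loop header copying (an
   interpretation, not from the original sources): given

     H: if (i_1 < n_2) goto BODY; else goto EXIT;
     BODY: ...; goto H;

   copying H and redirecting the preheader edge to the copy turns the
   copy into a guard test in front of the loop, while the original test
   becomes the exit test of a do-while-style loop.  */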
5113
5114 bool
5115 gimple_duplicate_sese_region (edge entry, edge exit,
5116 basic_block *region, unsigned n_region,
5117 basic_block *region_copy)
5118 {
5119 unsigned i;
5120 bool free_region_copy = false, copying_header = false;
5121 struct loop *loop = entry->dest->loop_father;
5122 edge exit_copy;
5123 VEC (basic_block, heap) *doms;
5124 edge redirected;
5125 int total_freq = 0, entry_freq = 0;
5126 gcov_type total_count = 0, entry_count = 0;
5127
5128 if (!can_copy_bbs_p (region, n_region))
5129 return false;
5130
5131 /* Some sanity checking. Note that we do not check for all possible
5132 misuses of the functions. I.e. if you ask to copy something weird,
5133 it will work, but the state of the structures probably will not be
5134 correct. */
5135 for (i = 0; i < n_region; i++)
5136 {
5137 /* We do not handle subloops, i.e. all the blocks must belong to the
5138 same loop. */
5139 if (region[i]->loop_father != loop)
5140 return false;
5141
5142 if (region[i] != entry->dest
5143 && region[i] == loop->header)
5144 return false;
5145 }
5146
5147 set_loop_copy (loop, loop);
5148
5149 /* In case the function is used for loop header copying (which is the primary
5150 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5151 if (loop->header == entry->dest)
5152 {
5153 copying_header = true;
5154 set_loop_copy (loop, loop_outer (loop));
5155
5156 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5157 return false;
5158
5159 for (i = 0; i < n_region; i++)
5160 if (region[i] != exit->src
5161 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5162 return false;
5163 }
5164
5165 if (!region_copy)
5166 {
5167 region_copy = XNEWVEC (basic_block, n_region);
5168 free_region_copy = true;
5169 }
5170
5171 gcc_assert (!need_ssa_update_p (cfun));
5172
5173 /* Record blocks outside the region that are dominated by something
5174 inside. */
5175 doms = NULL;
5176 initialize_original_copy_tables ();
5177
5178 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5179
5180 if (entry->dest->count)
5181 {
5182 total_count = entry->dest->count;
5183 entry_count = entry->count;
5184 /* Fix up corner cases, to avoid division by zero or creation of negative
5185 frequencies. */
5186 if (entry_count > total_count)
5187 entry_count = total_count;
5188 }
5189 else
5190 {
5191 total_freq = entry->dest->frequency;
5192 entry_freq = EDGE_FREQUENCY (entry);
5193 /* Fix up corner cases, to avoid division by zero or creation of negative
5194 frequencies. */
5195 if (total_freq == 0)
5196 total_freq = 1;
5197 else if (entry_freq > total_freq)
5198 entry_freq = total_freq;
5199 }
5200
5201 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5202 split_edge_bb_loc (entry));
5203 if (total_count)
5204 {
5205 scale_bbs_frequencies_gcov_type (region, n_region,
5206 total_count - entry_count,
5207 total_count);
5208 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5209 total_count);
5210 }
5211 else
5212 {
5213 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5214 total_freq);
5215 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5216 }
5217
5218 if (copying_header)
5219 {
5220 loop->header = exit->dest;
5221 loop->latch = exit->src;
5222 }
5223
5224 /* Redirect the entry and add the phi node arguments. */
5225 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5226 gcc_assert (redirected != NULL);
5227 flush_pending_stmts (entry);
5228
5229 /* Concerning updating of dominators: We must recount dominators
5230 for entry block and its copy. Anything that is outside of the
5231 region, but was dominated by something inside needs recounting as
5232 well. */
5233 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5234 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5235 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5236 VEC_free (basic_block, heap, doms);
5237
5238 /* Add the other PHI node arguments. */
5239 add_phi_args_after_copy (region_copy, n_region, NULL);
5240
5241 /* Update the SSA web. */
5242 update_ssa (TODO_update_ssa);
5243
5244 if (free_region_copy)
5245 free (region_copy);
5246
5247 free_original_copy_tables ();
5248 return true;
5249 }
5250
5251 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5252 are stored to REGION_COPY in the same order in which they appear
5253 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5254 the region, EXIT an exit from it. The condition guarding EXIT
5255 is moved to ENTRY. Returns true if duplication succeeds, false
5256 otherwise.
5257
5258 For example,
5259
5260 some_code;
5261 if (cond)
5262 A;
5263 else
5264 B;
5265
5266 is transformed to
5267
5268 if (cond)
5269 {
5270 some_code;
5271 A;
5272 }
5273 else
5274 {
5275 some_code;
5276 B;
5277 }
5278 */
5279
5280 bool
5281 gimple_duplicate_sese_tail (edge entry, edge exit,
5282 basic_block *region, unsigned n_region,
5283 basic_block *region_copy)
5284 {
5285 unsigned i;
5286 bool free_region_copy = false;
5287 struct loop *loop = exit->dest->loop_father;
5288 struct loop *orig_loop = entry->dest->loop_father;
5289 basic_block switch_bb, entry_bb, nentry_bb;
5290 VEC (basic_block, heap) *doms;
5291 int total_freq = 0, exit_freq = 0;
5292 gcov_type total_count = 0, exit_count = 0;
5293 edge exits[2], nexits[2], e;
5294 gimple_stmt_iterator gsi;
5295 gimple cond_stmt;
5296 edge sorig, snew;
5297
5298 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5299 exits[0] = exit;
5300 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5301
5302 if (!can_copy_bbs_p (region, n_region))
5303 return false;
5304
5305 /* Some sanity checking. Note that we do not check for all possible
5306 misuses of the functions. I.e. if you ask to copy something weird
5307 (e.g., in the example, if there is a jump from inside to the middle
5308 of some_code, or some_code defines some of the values used in cond),
5309 it will work, but the resulting code will not be correct. */
5310 for (i = 0; i < n_region; i++)
5311 {
5312 /* We do not handle subloops, i.e. all the blocks must belong to the
5313 same loop. */
5314 if (region[i]->loop_father != orig_loop)
5315 return false;
5316
5317 if (region[i] == orig_loop->latch)
5318 return false;
5319 }
5320
5321 initialize_original_copy_tables ();
5322 set_loop_copy (orig_loop, loop);
5323
5324 if (!region_copy)
5325 {
5326 region_copy = XNEWVEC (basic_block, n_region);
5327 free_region_copy = true;
5328 }
5329
5330 gcc_assert (!need_ssa_update_p (cfun));
5331
5332 /* Record blocks outside the region that are dominated by something
5333 inside. */
5334 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5335
5336 if (exit->src->count)
5337 {
5338 total_count = exit->src->count;
5339 exit_count = exit->count;
5340 /* Fix up corner cases, to avoid division by zero or creation of negative
5341 frequencies. */
5342 if (exit_count > total_count)
5343 exit_count = total_count;
5344 }
5345 else
5346 {
5347 total_freq = exit->src->frequency;
5348 exit_freq = EDGE_FREQUENCY (exit);
5349 /* Fix up corner cases, to avoid division by zero or creation of negative
5350 frequencies. */
5351 if (total_freq == 0)
5352 total_freq = 1;
5353 if (exit_freq > total_freq)
5354 exit_freq = total_freq;
5355 }
5356
5357 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5358 split_edge_bb_loc (exit));
5359 if (total_count)
5360 {
5361 scale_bbs_frequencies_gcov_type (region, n_region,
5362 total_count - exit_count,
5363 total_count);
5364 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5365 total_count);
5366 }
5367 else
5368 {
5369 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5370 total_freq);
5371 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5372 }
5373
5374 /* Create the switch block, and put the exit condition into it. */
5375 entry_bb = entry->dest;
5376 nentry_bb = get_bb_copy (entry_bb);
5377 if (!last_stmt (entry->src)
5378 || !stmt_ends_bb_p (last_stmt (entry->src)))
5379 switch_bb = entry->src;
5380 else
5381 switch_bb = split_edge (entry);
5382 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5383
5384 gsi = gsi_last_bb (switch_bb);
5385 cond_stmt = last_stmt (exit->src);
5386 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5387 cond_stmt = gimple_copy (cond_stmt);
5388 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5389 gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
5390 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5391
5392 sorig = single_succ_edge (switch_bb);
5393 sorig->flags = exits[1]->flags;
5394 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5395
5396 /* Register the new edge from SWITCH_BB in loop exit lists. */
5397 rescan_loop_exit (snew, true, false);
5398
5399 /* Add the PHI node arguments. */
5400 add_phi_args_after_copy (region_copy, n_region, snew);
5401
5402 /* Get rid of now superfluous conditions and associated edges (and phi node
5403 arguments). */
5404 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5405 PENDING_STMT (e) = NULL;
5406 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
5407 PENDING_STMT (e) = NULL;
5408
5409 /* Anything outside of the region, but dominated by something
5410 inside, needs its dominance info updated. */
5411 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5412 VEC_free (basic_block, heap, doms);
5413
5414 /* Update the SSA web. */
5415 update_ssa (TODO_update_ssa);
5416
5417 if (free_region_copy)
5418 free (region_copy);
5419
5420 free_original_copy_tables ();
5421 return true;
5422 }
5423
5424 /* Add all the blocks dominated by ENTRY to the vector BBS_P. Stop
5425 adding blocks when the dominator traversal reaches EXIT. This
5426 function silently assumes that ENTRY strictly dominates EXIT. */
5427
5428 void
5429 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5430 VEC(basic_block,heap) **bbs_p)
5431 {
5432 basic_block son;
5433
5434 for (son = first_dom_son (CDI_DOMINATORS, entry);
5435 son;
5436 son = next_dom_son (CDI_DOMINATORS, son))
5437 {
5438 VEC_safe_push (basic_block, heap, *bbs_p, son);
5439 if (son != exit)
5440 gather_blocks_in_sese_region (son, exit, bbs_p);
5441 }
5442 }
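
/* Usage sketch, mirroring move_sese_region_to_fn below (ENTRY itself
   must be pushed by the caller, since the walk only visits ENTRY's
   dominator sons):

     VEC(basic_block,heap) *bbs = NULL;
     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);  */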
5443
5444 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5445 The duplicates are recorded in VARS_MAP. */
5446
5447 static void
5448 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5449 tree to_context)
5450 {
5451 tree t = *tp, new_t;
5452 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5453 void **loc;
5454
5455 if (DECL_CONTEXT (t) == to_context)
5456 return;
5457
5458 loc = pointer_map_contains (vars_map, t);
5459
5460 if (!loc)
5461 {
5462 loc = pointer_map_insert (vars_map, t);
5463
5464 if (SSA_VAR_P (t))
5465 {
5466 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5467 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5468 }
5469 else
5470 {
5471 gcc_assert (TREE_CODE (t) == CONST_DECL);
5472 new_t = copy_node (t);
5473 }
5474 DECL_CONTEXT (new_t) = to_context;
5475
5476 *loc = new_t;
5477 }
5478 else
5479 new_t = (tree) *loc;
5480
5481 *tp = new_t;
5482 }
5483
5484
5485 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5486 VARS_MAP maps old ssa names and var_decls to the new ones. */
5487
5488 static tree
5489 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5490 tree to_context)
5491 {
5492 void **loc;
5493 tree new_name, decl = SSA_NAME_VAR (name);
5494
5495 gcc_assert (is_gimple_reg (name));
5496
5497 loc = pointer_map_contains (vars_map, name);
5498
5499 if (!loc)
5500 {
5501 replace_by_duplicate_decl (&decl, vars_map, to_context);
5502
5503 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5504 if (gimple_in_ssa_p (cfun))
5505 add_referenced_var (decl);
5506
5507 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5508 if (SSA_NAME_IS_DEFAULT_DEF (name))
5509 set_default_def (decl, new_name);
5510 pop_cfun ();
5511
5512 loc = pointer_map_insert (vars_map, name);
5513 *loc = new_name;
5514 }
5515 else
5516 new_name = (tree) *loc;
5517
5518 return new_name;
5519 }
5520
5521 struct move_stmt_d
5522 {
5523 tree orig_block;
5524 tree new_block;
5525 tree from_context;
5526 tree to_context;
5527 struct pointer_map_t *vars_map;
5528 htab_t new_label_map;
5529 bool remap_decls_p;
5530 };
5531
5532 /* Helper for move_block_to_fn. Replace every SSA name in *TP and
5533 change the DECL_CONTEXT of every local variable or label referenced
5534 in *TP, remapping local decls to their duplicates. */
5535
5536 static tree
5537 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5538 {
5539 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5540 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5541 tree t = *tp;
5542
5543 if (EXPR_P (t))
5544 /* We should never have TREE_BLOCK set on non-statements. */
5545 gcc_assert (!TREE_BLOCK (t));
5546
5547 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5548 {
5549 if (TREE_CODE (t) == SSA_NAME)
5550 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5551 else if (TREE_CODE (t) == LABEL_DECL)
5552 {
5553 if (p->new_label_map)
5554 {
5555 struct tree_map in, *out;
5556 in.base.from = t;
5557 out = (struct tree_map *)
5558 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5559 if (out)
5560 *tp = t = out->to;
5561 }
5562
5563 DECL_CONTEXT (t) = p->to_context;
5564 }
5565 else if (p->remap_decls_p)
5566 {
5567 /* Replace T with its duplicate. T should no longer appear in the
5568 parent function, so this looks wasteful; however, it may appear
5569 in referenced_vars, and more importantly, as virtual operands of
5570 statements, and in alias lists of other variables. It would be
5571 quite difficult to expunge it from all those places. ??? It might
5572 suffice to do this for addressable variables. */
5573 if ((TREE_CODE (t) == VAR_DECL
5574 && !is_global_var (t))
5575 || TREE_CODE (t) == CONST_DECL)
5576 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5577
5578 if (SSA_VAR_P (t)
5579 && gimple_in_ssa_p (cfun))
5580 {
5581 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5582 add_referenced_var (*tp);
5583 pop_cfun ();
5584 }
5585 }
5586 *walk_subtrees = 0;
5587 }
5588 else if (TYPE_P (t))
5589 *walk_subtrees = 0;
5590
5591 return NULL_TREE;
5592 }
5593
5594 /* Like move_stmt_op, but for gimple statements.
5595
5596 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5597 contained in the current statement in *GSI_P and change the
5598 DECL_CONTEXT of every local variable referenced in the current
5599 statement. */
5600
5601 static tree
5602 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5603 struct walk_stmt_info *wi)
5604 {
5605 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5606 gimple stmt = gsi_stmt (*gsi_p);
5607 tree block = gimple_block (stmt);
5608
5609 if (p->orig_block == NULL_TREE
5610 || block == p->orig_block
5611 || block == NULL_TREE)
5612 gimple_set_block (stmt, p->new_block);
5613 #ifdef ENABLE_CHECKING
5614 else if (block != p->new_block)
5615 {
5616 while (block && block != p->orig_block)
5617 block = BLOCK_SUPERCONTEXT (block);
5618 gcc_assert (block);
5619 }
5620 #endif
5621
5622 if (is_gimple_omp (stmt)
5623 && gimple_code (stmt) != GIMPLE_OMP_RETURN
5624 && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
5625 {
5626 /* Do not remap variables inside OMP directives. Variables
5627 referenced in clauses and directive header belong to the
5628 parent function and should not be moved into the child
5629 function. */
5630 bool save_remap_decls_p = p->remap_decls_p;
5631 p->remap_decls_p = false;
5632 *handled_ops_p = true;
5633
5634 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
5635
5636 p->remap_decls_p = save_remap_decls_p;
5637 }
5638
5639 return NULL_TREE;
5640 }
5641
5642 /* Marks virtual operands of all statements in basic block BB for
5643 renaming. */
5644
5645 void
5646 mark_virtual_ops_in_bb (basic_block bb)
5647 {
5648 gimple_stmt_iterator gsi;
5649
5650 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5651 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5652
5653 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5654 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5655 }
5656
5657 /* Move basic block BB from function CFUN to function DEST_FN. The
5658 block is moved out of the original linked list and placed after
5659 block AFTER in the new list. Also, the block is removed from the
5660 original array of blocks and placed in DEST_FN's array of blocks.
5661 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5662 updated to reflect the moved edges.
5663
5664 The local variables are remapped to new instances, VARS_MAP is used
5665 to record the mapping. */
5666
5667 static void
5668 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5669 basic_block after, bool update_edge_count_p,
5670 struct move_stmt_d *d, int eh_offset)
5671 {
5672 struct control_flow_graph *cfg;
5673 edge_iterator ei;
5674 edge e;
5675 gimple_stmt_iterator si;
5676 unsigned old_len, new_len;
5677
5678 /* Remove BB from dominance structures. */
5679 delete_from_dominance_info (CDI_DOMINATORS, bb);
5680 if (current_loops)
5681 remove_bb_from_loops (bb);
5682
5683 /* Link BB to the new linked list. */
5684 move_block_after (bb, after);
5685
5686 /* Update the edge count in the corresponding flowgraphs. */
5687 if (update_edge_count_p)
5688 FOR_EACH_EDGE (e, ei, bb->succs)
5689 {
5690 cfun->cfg->x_n_edges--;
5691 dest_cfun->cfg->x_n_edges++;
5692 }
5693
5694 /* Remove BB from the original basic block array. */
5695 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5696 cfun->cfg->x_n_basic_blocks--;
5697
5698 /* Grow DEST_CFUN's basic block array if needed. */
5699 cfg = dest_cfun->cfg;
5700 cfg->x_n_basic_blocks++;
5701 if (bb->index >= cfg->x_last_basic_block)
5702 cfg->x_last_basic_block = bb->index + 1;
5703
5704 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5705 if ((unsigned) cfg->x_last_basic_block >= old_len)
5706 {
5707 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5708 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5709 new_len);
5710 }
5711
5712 VEC_replace (basic_block, cfg->x_basic_block_info,
5713 bb->index, bb);
5714
5715 /* Remap the variables in phi nodes. */
5716 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5717 {
5718 gimple phi = gsi_stmt (si);
5719 use_operand_p use;
5720 tree op = PHI_RESULT (phi);
5721 ssa_op_iter oi;
5722
5723 if (!is_gimple_reg (op))
5724 {
5725 /* Remove the phi nodes for virtual operands (alias analysis will be
5726 run for the new function, anyway). */
5727 remove_phi_node (&si, true);
5728 continue;
5729 }
5730
5731 SET_PHI_RESULT (phi,
5732 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5733 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5734 {
5735 op = USE_FROM_PTR (use);
5736 if (TREE_CODE (op) == SSA_NAME)
5737 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5738 }
5739
5740 gsi_next (&si);
5741 }
5742
5743 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5744 {
5745 gimple stmt = gsi_stmt (si);
5746 int region;
5747 struct walk_stmt_info wi;
5748
5749 memset (&wi, 0, sizeof (wi));
5750 wi.info = d;
5751 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5752
5753 if (gimple_code (stmt) == GIMPLE_LABEL)
5754 {
5755 tree label = gimple_label_label (stmt);
5756 int uid = LABEL_DECL_UID (label);
5757
5758 gcc_assert (uid > -1);
5759
5760 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5761 if (old_len <= (unsigned) uid)
5762 {
5763 new_len = 3 * uid / 2 + 1;
5764 VEC_safe_grow_cleared (basic_block, gc,
5765 cfg->x_label_to_block_map, new_len);
5766 }
5767
5768 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5769 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5770
5771 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5772
5773 if (uid >= dest_cfun->cfg->last_label_uid)
5774 dest_cfun->cfg->last_label_uid = uid + 1;
5775 }
5776 else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
5777 gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
5778
5779 region = lookup_stmt_eh_region (stmt);
5780 if (region >= 0)
5781 {
5782 add_stmt_to_eh_region_fn (dest_cfun, stmt, region + eh_offset);
5783 remove_stmt_from_eh_region (stmt);
5784 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5785 gimple_remove_stmt_histograms (cfun, stmt);
5786 }
5787
5788 /* We cannot leave any operands allocated from the operand caches of
5789 the current function. */
5790 free_stmt_operands (stmt);
5791 push_cfun (dest_cfun);
5792 update_stmt (stmt);
5793 pop_cfun ();
5794 }
5795
5796 FOR_EACH_EDGE (e, ei, bb->succs)
5797 if (e->goto_locus)
5798 {
5799 tree block = e->goto_block;
5800 if (d->orig_block == NULL_TREE
5801 || block == d->orig_block)
5802 e->goto_block = d->new_block;
5803 #ifdef ENABLE_CHECKING
5804 else if (block != d->new_block)
5805 {
5806 while (block && block != d->orig_block)
5807 block = BLOCK_SUPERCONTEXT (block);
5808 gcc_assert (block);
5809 }
5810 #endif
5811 }
5812 }
5813
5814 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5815 the outermost EH region. Use REGION as the incoming base EH region. */
5816
5817 static int
5818 find_outermost_region_in_block (struct function *src_cfun,
5819 basic_block bb, int region)
5820 {
5821 gimple_stmt_iterator si;
5822
5823 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5824 {
5825 gimple stmt = gsi_stmt (si);
5826 int stmt_region;
5827
5828 if (gimple_code (stmt) == GIMPLE_RESX)
5829 stmt_region = gimple_resx_region (stmt);
5830 else
5831 stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
5832 if (stmt_region > 0)
5833 {
5834 if (region < 0)
5835 region = stmt_region;
5836 else if (stmt_region != region)
5837 {
5838 region = eh_region_outermost (src_cfun, stmt_region, region);
5839 gcc_assert (region != -1);
5840 }
5841 }
5842 }
5843
5844 return region;
5845 }
5846
5847 static tree
5848 new_label_mapper (tree decl, void *data)
5849 {
5850 htab_t hash = (htab_t) data;
5851 struct tree_map *m;
5852 void **slot;
5853
5854 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
5855
5856 m = XNEW (struct tree_map);
5857 m->hash = DECL_UID (decl);
5858 m->base.from = decl;
5859 m->to = create_artificial_label ();
5860 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
5861 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
5862 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
5863
5864 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
5865 gcc_assert (*slot == NULL);
5866
5867 *slot = m;
5868
5869 return m->to;
5870 }
5871
5872 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
5873 subblocks. */
5874
5875 static void
5876 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
5877 tree to_context)
5878 {
5879 tree *tp, t;
5880
5881 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
5882 {
5883 t = *tp;
5884 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
5885 continue;
5886 replace_by_duplicate_decl (&t, vars_map, to_context);
5887 if (t != *tp)
5888 {
5889 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
5890 {
5891 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
5892 DECL_HAS_VALUE_EXPR_P (t) = 1;
5893 }
5894 TREE_CHAIN (t) = TREE_CHAIN (*tp);
5895 *tp = t;
5896 }
5897 }
5898
5899 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
5900 replace_block_vars_by_duplicates (block, vars_map, to_context);
5901 }
5902
5903 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
5904 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
5905 single basic block in the original CFG and the new basic block is
5906 returned. DEST_CFUN must not have a CFG yet.
5907
5908 Note that the region need not be a pure SESE region. Blocks inside
5909 the region may contain calls to abort/exit. The only restriction
5910 is that ENTRY_BB should be the only entry point and it must
5911 dominate EXIT_BB.
5912
5913 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
5914 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
5915 to the new function.
5916
5917 All local variables referenced in the region are assumed to be in
5918 the corresponding BLOCK_VARS and unexpanded variable lists
5919 associated with DEST_CFUN. */
5920
5921 basic_block
5922 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
5923 basic_block exit_bb, tree orig_block)
5924 {
5925 VEC(basic_block,heap) *bbs, *dom_bbs;
5926 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
5927 basic_block after, bb, *entry_pred, *exit_succ, abb;
5928 struct function *saved_cfun = cfun;
5929 int *entry_flag, *exit_flag, eh_offset;
5930 unsigned *entry_prob, *exit_prob;
5931 unsigned i, num_entry_edges, num_exit_edges;
5932 edge e;
5933 edge_iterator ei;
5934 htab_t new_label_map;
5935 struct pointer_map_t *vars_map;
5936 struct loop *loop = entry_bb->loop_father;
5937 struct move_stmt_d d;
5938
5939 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
5940 region. */
5941 gcc_assert (entry_bb != exit_bb
5942 && (!exit_bb
5943 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
5944
5945 /* Collect all the blocks in the region. Manually add ENTRY_BB
5946 because gather_blocks_in_sese_region only adds its dominator sons. */
5947 bbs = NULL;
5948 VEC_safe_push (basic_block, heap, bbs, entry_bb);
5949 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
5950
5951 /* The blocks that used to be dominated by something in BBS will now be
5952 dominated by the new block. */
5953 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
5954 VEC_address (basic_block, bbs),
5955 VEC_length (basic_block, bbs));
5956
5957 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
5958 the predecessor edges to ENTRY_BB and the successor edges to
5959 EXIT_BB so that we can re-attach them to the new basic block that
5960 will replace the region. */
5961 num_entry_edges = EDGE_COUNT (entry_bb->preds);
5962 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
5963 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
5964 entry_prob = XNEWVEC (unsigned, num_entry_edges);
5965 i = 0;
5966 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
5967 {
5968 entry_prob[i] = e->probability;
5969 entry_flag[i] = e->flags;
5970 entry_pred[i++] = e->src;
5971 remove_edge (e);
5972 }
5973
5974 if (exit_bb)
5975 {
5976 num_exit_edges = EDGE_COUNT (exit_bb->succs);
5977 exit_succ = (basic_block *) xcalloc (num_exit_edges,
5978 sizeof (basic_block));
5979 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
5980 exit_prob = XNEWVEC (unsigned, num_exit_edges);
5981 i = 0;
5982 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
5983 {
5984 exit_prob[i] = e->probability;
5985 exit_flag[i] = e->flags;
5986 exit_succ[i++] = e->dest;
5987 remove_edge (e);
5988 }
5989 }
5990 else
5991 {
5992 num_exit_edges = 0;
5993 exit_succ = NULL;
5994 exit_flag = NULL;
5995 exit_prob = NULL;
5996 }
5997
5998 /* Switch context to the child function to initialize DEST_FN's CFG. */
5999 gcc_assert (dest_cfun->cfg == NULL);
6000 push_cfun (dest_cfun);
6001
6002 init_empty_tree_cfg ();
6003
6004 /* Initialize EH information for the new function. */
6005 eh_offset = 0;
6006 new_label_map = NULL;
6007 if (saved_cfun->eh)
6008 {
6009 int region = -1;
6010
6011 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6012 region = find_outermost_region_in_block (saved_cfun, bb, region);
6013
6014 init_eh_for_function ();
6015 if (region != -1)
6016 {
6017 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6018 eh_offset = duplicate_eh_regions (saved_cfun, new_label_mapper,
6019 new_label_map, region, 0);
6020 }
6021 }
6022
6023 pop_cfun ();
6024
6025 /* Move blocks from BBS into DEST_CFUN. */
6026 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6027 after = dest_cfun->cfg->x_entry_block_ptr;
6028 vars_map = pointer_map_create ();
6029
6030 memset (&d, 0, sizeof (d));
6031 d.vars_map = vars_map;
6032 d.from_context = cfun->decl;
6033 d.to_context = dest_cfun->decl;
6034 d.new_label_map = new_label_map;
6035 d.remap_decls_p = true;
6036 d.orig_block = orig_block;
6037 d.new_block = DECL_INITIAL (dest_cfun->decl);
6038
6039 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6040 {
6041 /* No need to update edge counts on the last block. They have
6042 already been updated earlier when we detached the region from
6043 the original CFG. */
6044 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d, eh_offset);
6045 after = bb;
6046 }
6047
6048 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6049 if (orig_block)
6050 {
6051 tree block;
6052 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6053 == NULL_TREE);
6054 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6055 = BLOCK_SUBBLOCKS (orig_block);
6056 for (block = BLOCK_SUBBLOCKS (orig_block);
6057 block; block = BLOCK_CHAIN (block))
6058 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6059 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6060 }
6061
6062 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6063 vars_map, dest_cfun->decl);
6064
6065 if (new_label_map)
6066 htab_delete (new_label_map);
6067 pointer_map_destroy (vars_map);
6068
6069 /* Rewire the entry and exit blocks. The successor to the entry
6070 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6071 the child function. Similarly, the predecessor of DEST_FN's
6072 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6073 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6074 various CFG manipulation functions get to the right CFG.
6075
6076 FIXME, this is silly. The CFG ought to become a parameter to
6077 these helpers. */
6078 push_cfun (dest_cfun);
6079 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6080 if (exit_bb)
6081 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6082 pop_cfun ();
6083
6084 /* Back in the original function, the SESE region has disappeared;
6085 create a new basic block in its place. */
6086 bb = create_empty_bb (entry_pred[0]);
6087 if (current_loops)
6088 add_bb_to_loop (bb, loop);
6089 for (i = 0; i < num_entry_edges; i++)
6090 {
6091 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6092 e->probability = entry_prob[i];
6093 }
6094
6095 for (i = 0; i < num_exit_edges; i++)
6096 {
6097 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6098 e->probability = exit_prob[i];
6099 }
6100
6101 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6102 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6103 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6104 VEC_free (basic_block, heap, dom_bbs);
6105
6106 if (exit_bb)
6107 {
6108 free (exit_prob);
6109 free (exit_flag);
6110 free (exit_succ);
6111 }
6112 free (entry_prob);
6113 free (entry_flag);
6114 free (entry_pred);
6115 VEC_free (basic_block, heap, bbs);
6116
6117 return bb;
6118 }
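
/* A minimal usage sketch (illustrative only; CHILD_FN, REGION_ENTRY,
   REGION_EXIT and REGION_BLOCK are hypothetical names, and real
   callers such as the OpenMP expanders do additional bookkeeping):

	struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
	basic_block merged_bb
	  = move_sese_region_to_fn (child_cfun, region_entry,
				    region_exit, region_block);

   MERGED_BB then stands in for the whole region in the parent's CFG,
   with the original entry and exit edges re-attached to it.  */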
6119
6120
6121 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6122 tree-pass.h). */
6123
6124 void
6125 dump_function_to_file (tree fn, FILE *file, int flags)
6126 {
6127 tree arg, vars, var;
6128 struct function *dsf;
6129 bool ignore_topmost_bind = false, any_var = false;
6130 basic_block bb;
6131 tree chain;
6132
6133 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6134
6135 arg = DECL_ARGUMENTS (fn);
6136 while (arg)
6137 {
6138 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6139 fprintf (file, " ");
6140 print_generic_expr (file, arg, dump_flags);
6141 if (flags & TDF_VERBOSE)
6142 print_node (file, "", arg, 4);
6143 if (TREE_CHAIN (arg))
6144 fprintf (file, ", ");
6145 arg = TREE_CHAIN (arg);
6146 }
6147 fprintf (file, ")\n");
6148
6149 if (flags & TDF_VERBOSE)
6150 print_node (file, "", fn, 2);
6151
6152 dsf = DECL_STRUCT_FUNCTION (fn);
6153 if (dsf && (flags & TDF_DETAILS))
6154 dump_eh_tree (file, dsf);
6155
6156 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6157 {
6158 dump_node (fn, TDF_SLIM | flags, file);
6159 return;
6160 }
6161
6162 /* Switch CFUN to point to FN. */
6163 push_cfun (DECL_STRUCT_FUNCTION (fn));
6164
6165 /* When GIMPLE is lowered, the variables are no longer available in
6166 BIND_EXPRs, so display them separately. */
6167 if (cfun && cfun->decl == fn && cfun->local_decls)
6168 {
6169 ignore_topmost_bind = true;
6170
6171 fprintf (file, "{\n");
6172 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6173 {
6174 var = TREE_VALUE (vars);
6175
6176 print_generic_decl (file, var, flags);
6177 if (flags & TDF_VERBOSE)
6178 print_node (file, "", var, 4);
6179 fprintf (file, "\n");
6180
6181 any_var = true;
6182 }
6183 }
6184
6185 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6186 {
6187 /* If the CFG has been built, emit a CFG-based dump. */
6188 check_bb_profile (ENTRY_BLOCK_PTR, file);
6189 if (!ignore_topmost_bind)
6190 fprintf (file, "{\n");
6191
6192 if (any_var && n_basic_blocks)
6193 fprintf (file, "\n");
6194
6195 FOR_EACH_BB (bb)
6196 gimple_dump_bb (bb, file, 2, flags);
6197
6198 fprintf (file, "}\n");
6199 check_bb_profile (EXIT_BLOCK_PTR, file);
6200 }
6201 else if (DECL_SAVED_TREE (fn) == NULL)
6202 {
6203 /* The function is now in GIMPLE form but the CFG has not been
6204 built yet. Emit the single sequence of GIMPLE statements
6205 that make up its body. */
6206 gimple_seq body = gimple_body (fn);
6207
6208 if (gimple_seq_first_stmt (body)
6209 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6210 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6211 print_gimple_seq (file, body, 0, flags);
6212 else
6213 {
6214 if (!ignore_topmost_bind)
6215 fprintf (file, "{\n");
6216
6217 if (any_var)
6218 fprintf (file, "\n");
6219
6220 print_gimple_seq (file, body, 2, flags);
6221 fprintf (file, "}\n");
6222 }
6223 }
6224 else
6225 {
6226 int indent;
6227
6228 /* Make a tree based dump. */
6229 chain = DECL_SAVED_TREE (fn);
6230
6231 if (chain && TREE_CODE (chain) == BIND_EXPR)
6232 {
6233 if (ignore_topmost_bind)
6234 {
6235 chain = BIND_EXPR_BODY (chain);
6236 indent = 2;
6237 }
6238 else
6239 indent = 0;
6240 }
6241 else
6242 {
6243 if (!ignore_topmost_bind)
6244 fprintf (file, "{\n");
6245 indent = 2;
6246 }
6247
6248 if (any_var)
6249 fprintf (file, "\n");
6250
6251 print_generic_stmt_indented (file, chain, flags, indent);
6252 if (ignore_topmost_bind)
6253 fprintf (file, "}\n");
6254 }
6255
6256 fprintf (file, "\n\n");
6257
6258 /* Restore CFUN. */
6259 pop_cfun ();
6260 }
6261
6262
6263 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree-pass.h). */
6264
6265 void
6266 debug_function (tree fn, int flags)
6267 {
6268 dump_function_to_file (fn, stderr, flags);
6269 }
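
/* For instance (illustrative), from a debugger session:

	(gdb) call debug_function (cfun->decl, TDF_DETAILS)

   prints the current function in its GIMPLE form, including the EH
   tree when one exists.  */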
6270
6271
6272 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6273
6274 static void
6275 print_pred_bbs (FILE *file, basic_block bb)
6276 {
6277 edge e;
6278 edge_iterator ei;
6279
6280 FOR_EACH_EDGE (e, ei, bb->preds)
6281 fprintf (file, "bb_%d ", e->src->index);
6282 }
6283
6284
6285 /* Print on FILE the indexes for the successors of basic_block BB. */
6286
6287 static void
6288 print_succ_bbs (FILE *file, basic_block bb)
6289 {
6290 edge e;
6291 edge_iterator ei;
6292
6293 FOR_EACH_EDGE (e, ei, bb->succs)
6294 fprintf (file, "bb_%d ", e->dest->index);
6295 }
6296
6297 /* Print to FILE the basic block BB according to the VERBOSITY level. */
6298
6299 void
6300 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6301 {
6302 char *s_indent = (char *) alloca ((size_t) indent + 1);
6303 memset ((void *) s_indent, ' ', (size_t) indent);
6304 s_indent[indent] = '\0';
6305
6306 /* Print basic_block's header. */
6307 if (verbosity >= 2)
6308 {
6309 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6310 print_pred_bbs (file, bb);
6311 fprintf (file, "}, succs = {");
6312 print_succ_bbs (file, bb);
6313 fprintf (file, "})\n");
6314 }
6315
6316 /* Print basic_block's body. */
6317 if (verbosity >= 3)
6318 {
6319 fprintf (file, "%s {\n", s_indent);
6320 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6321 fprintf (file, "%s }\n", s_indent);
6322 }
6323 }
6324
6325 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6326
6327 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on
6328 the VERBOSITY level, this outputs the contents of the loop or
6329 just its structure. */
6330
6331 static void
6332 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6333 {
6334 char *s_indent;
6335 basic_block bb;
6336
6337 if (loop == NULL)
6338 return;
6339
6340 s_indent = (char *) alloca ((size_t) indent + 1);
6341 memset ((void *) s_indent, ' ', (size_t) indent);
6342 s_indent[indent] = '\0';
6343
6344 /* Print loop's header. */
6345 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6346 loop->num, loop->header->index, loop->latch->index);
6347 fprintf (file, ", niter = ");
6348 print_generic_expr (file, loop->nb_iterations, 0);
6349
6350 if (loop->any_upper_bound)
6351 {
6352 fprintf (file, ", upper_bound = ");
6353 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6354 }
6355
6356 if (loop->any_estimate)
6357 {
6358 fprintf (file, ", estimate = ");
6359 dump_double_int (file, loop->nb_iterations_estimate, true);
6360 }
6361 fprintf (file, ")\n");
6362
6363 /* Print loop's body. */
6364 if (verbosity >= 1)
6365 {
6366 fprintf (file, "%s{\n", s_indent);
6367 FOR_EACH_BB (bb)
6368 if (bb->loop_father == loop)
6369 print_loops_bb (file, bb, indent, verbosity);
6370
6371 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6372 fprintf (file, "%s}\n", s_indent);
6373 }
6374 }
6375
6376 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6377 spaces. Depending on the VERBOSITY level, this outputs the
6378 contents of each loop or just its structure. */
6379
6380 static void
6381 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6382 {
6383 if (loop == NULL)
6384 return;
6385
6386 print_loop (file, loop, indent, verbosity);
6387 print_loop_and_siblings (file, loop->next, indent, verbosity);
6388 }
6389
6390 /* Pretty print on FILE the loop structure of the current function,
6391 starting from the loop tree root at the entry block. */
6392
6393 void
6394 print_loops (FILE *file, int verbosity)
6395 {
6396 basic_block bb;
6397
6398 bb = ENTRY_BLOCK_PTR;
6399 if (bb && bb->loop_father)
6400 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6401 }
6402
6403
6404 /* Print on stderr the loop structure at tree level, at some VERBOSITY level. */
6405
6406 void
6407 debug_loops (int verbosity)
6408 {
6409 print_loops (stderr, verbosity);
6410 }
6411
6412 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6413
6414 void
6415 debug_loop (struct loop *loop, int verbosity)
6416 {
6417 print_loop (stderr, loop, 0, verbosity);
6418 }
6419
6420 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6421 level. */
6422
6423 void
6424 debug_loop_num (unsigned num, int verbosity)
6425 {
6426 debug_loop (get_loop (num), verbosity);
6427 }
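
/* Usage sketch (illustrative): from a debugger, with the compiler
   stopped inside a GIMPLE pass that has loop structures built,

	(gdb) call debug_loops (1)
	(gdb) call debug_loop_num (1, 3)

   first prints the loop tree structure of the current function, then
   prints loop number 1 with the full GIMPLE of its blocks.  */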
6428
6429 /* Return true if BB ends with a call, possibly followed by some
6430 instructions that must stay with the call. Return false
6431 otherwise. */
6432
6433 static bool
6434 gimple_block_ends_with_call_p (basic_block bb)
6435 {
6436 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6437 return is_gimple_call (gsi_stmt (gsi));
6438 }
6439
6440
6441 /* Return true if BB ends with a conditional branch. Return false
6442 otherwise. */
6443
6444 static bool
6445 gimple_block_ends_with_condjump_p (const_basic_block bb)
6446 {
6447 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6448 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6449 }
6450
6451
6452 /* Return true if we need to add a fake edge to exit at statement T.
6453 Helper function for gimple_flow_call_edges_add. */
6454
6455 static bool
6456 need_fake_edge_p (gimple t)
6457 {
6458 tree fndecl = NULL_TREE;
6459 int call_flags = 0;
6460
6461 /* NORETURN and LONGJMP calls already have an edge to exit.
6462 CONST and PURE calls do not need one.
6463 We don't currently check for CONST and PURE here, although
6464 it would be a good idea, because those attributes are
6465 figured out from the RTL in mark_constant_function, and
6466 the counter incrementing code from -fprofile-arcs
6467 leads to different results from -fbranch-probabilities. */
6468 if (is_gimple_call (t))
6469 {
6470 fndecl = gimple_call_fndecl (t);
6471 call_flags = gimple_call_flags (t);
6472 }
6473
6474 if (is_gimple_call (t)
6475 && fndecl
6476 && DECL_BUILT_IN (fndecl)
6477 && (call_flags & ECF_NOTHROW)
6478 && !(call_flags & ECF_RETURNS_TWICE)
6479 /* fork() doesn't really return twice, but the effect of
6480 wrapping it in __gcov_fork() which calls __gcov_flush()
6481 and clears the counters before forking has the same
6482 effect as returning twice. Force a fake edge. */
6483 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6484 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6485 return false;
6486
6487 if (is_gimple_call (t)
6488 && !(call_flags & ECF_NORETURN))
6489 return true;
6490
6491 if (gimple_code (t) == GIMPLE_ASM
6492 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6493 return true;
6494
6495 return false;
6496 }
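
/* Illustrative examples of the predicate above: a call to an unknown
   function "foo ();" needs a fake edge; a noreturn call such as
   abort () already has its edge to exit and needs none; a nothrow
   built-in that cannot return twice needs none; and a volatile asm
   needs one, since it may transfer control in ways the CFG cannot
   see.  */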
6497
6498
6499 /* Add fake edges to the function exit for any non-constant and
6500 non-noreturn calls, or volatile inline assembly, in the bitmap of
6501 blocks specified by BLOCKS, or in the whole CFG if BLOCKS is zero.
6502 Return the number of blocks that were split.
6503
6504 The goal is to expose cases in which entering a basic block does
6505 not imply that all subsequent instructions must be executed. */
6506
6507 static int
6508 gimple_flow_call_edges_add (sbitmap blocks)
6509 {
6510 int i;
6511 int blocks_split = 0;
6512 int last_bb = last_basic_block;
6513 bool check_last_block = false;
6514
6515 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6516 return 0;
6517
6518 if (! blocks)
6519 check_last_block = true;
6520 else
6521 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6522
6523 /* In the last basic block, before epilogue generation, there will be
6524 a fallthru edge to EXIT. Special care is required if the last stmt
6525 of the last basic block is a call because make_edge folds duplicate
6526 edges, which would first cause the fallthru edge to also be marked
6527 fake, and then cause it to be removed by remove_fake_edges,
6528 leaving an invalid CFG.
6529
6530 Moreover, we can't elide the outgoing fake edge, since the block
6531 profiler needs to take this into account in order to solve the minimal
6532 spanning tree in the case that the call doesn't return.
6533
6534 Handle this by adding a dummy instruction in a new last basic block. */
6535 if (check_last_block)
6536 {
6537 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6538 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6539 gimple t = NULL;
6540
6541 if (!gsi_end_p (gsi))
6542 t = gsi_stmt (gsi);
6543
6544 if (t && need_fake_edge_p (t))
6545 {
6546 edge e;
6547
6548 e = find_edge (bb, EXIT_BLOCK_PTR);
6549 if (e)
6550 {
6551 gsi_insert_on_edge (e, gimple_build_nop ());
6552 gsi_commit_edge_inserts ();
6553 }
6554 }
6555 }
6556
6557 /* Now add fake edges to the function exit for any non-constant
6558 calls, since there is no way to determine whether they will
6559 return. */
6560 for (i = 0; i < last_bb; i++)
6561 {
6562 basic_block bb = BASIC_BLOCK (i);
6563 gimple_stmt_iterator gsi;
6564 gimple stmt, last_stmt;
6565
6566 if (!bb)
6567 continue;
6568
6569 if (blocks && !TEST_BIT (blocks, i))
6570 continue;
6571
6572 gsi = gsi_last_bb (bb);
6573 if (!gsi_end_p (gsi))
6574 {
6575 last_stmt = gsi_stmt (gsi);
6576 do
6577 {
6578 stmt = gsi_stmt (gsi);
6579 if (need_fake_edge_p (stmt))
6580 {
6581 edge e;
6582
6583 /* The handling above of the final block before the
6584 epilogue should be enough to verify that there is
6585 no edge to the exit block in CFG already.
6586 Calling make_edge in such case would cause us to
6587 mark that edge as fake and remove it later. */
6588 #ifdef ENABLE_CHECKING
6589 if (stmt == last_stmt)
6590 {
6591 e = find_edge (bb, EXIT_BLOCK_PTR);
6592 gcc_assert (e == NULL);
6593 }
6594 #endif
6595
6596 /* Note that the following may create a new basic block
6597 and renumber the existing basic blocks. */
6598 if (stmt != last_stmt)
6599 {
6600 e = split_block (bb, stmt);
6601 if (e)
6602 blocks_split++;
6603 }
6604 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6605 }
6606 gsi_prev (&gsi);
6607 }
6608 while (!gsi_end_p (gsi));
6609 }
6610 }
6611
6612 if (blocks_split)
6613 verify_flow_info ();
6614
6615 return blocks_split;
6616 }
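
/* Callers reach the function above through the CFG hook wrapper in
   cfghooks.c; a typical use (illustrative) adds fake edges for the
   whole CFG before solving the profiling spanning tree:

	int blocks_split = flow_call_edges_add (NULL);

   Passing a non-NULL sbitmap restricts the transformation to the
   blocks set in it.  */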
6617
6618 /* Purge dead abnormal call edges from basic block BB. */
6619
6620 bool
6621 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6622 {
6623 bool changed = gimple_purge_dead_eh_edges (bb);
6624
6625 if (cfun->has_nonlocal_label)
6626 {
6627 gimple stmt = last_stmt (bb);
6628 edge_iterator ei;
6629 edge e;
6630
6631 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6632 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6633 {
6634 if (e->flags & EDGE_ABNORMAL)
6635 {
6636 remove_edge (e);
6637 changed = true;
6638 }
6639 else
6640 ei_next (&ei);
6641 }
6642
6643 /* See gimple_purge_dead_eh_edges below. */
6644 if (changed)
6645 free_dominance_info (CDI_DOMINATORS);
6646 }
6647
6648 return changed;
6649 }
6650
6651 /* Store BB and all basic blocks dominated by it into DOM_BBS. */
6652
6653 static void
6654 get_all_dominated_blocks (basic_block bb, VEC (basic_block, heap) **dom_bbs)
6655 {
6656 basic_block son;
6657
6658 VEC_safe_push (basic_block, heap, *dom_bbs, bb);
6659 for (son = first_dom_son (CDI_DOMINATORS, bb);
6660 son;
6661 son = next_dom_son (CDI_DOMINATORS, son))
6662 get_all_dominated_blocks (son, dom_bbs);
6663 }
6664
6665 /* Removes edge E and all the blocks dominated by it, and updates dominance
6666 information. The IL in E->src needs to be updated separately.
6667 If dominance info is not available, only the edge E is removed. */
6668
6669 void
6670 remove_edge_and_dominated_blocks (edge e)
6671 {
6672 VEC (basic_block, heap) *bbs_to_remove = NULL;
6673 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6674 bitmap df, df_idom;
6675 edge f;
6676 edge_iterator ei;
6677 bool none_removed = false;
6678 unsigned i;
6679 basic_block bb, dbb;
6680 bitmap_iterator bi;
6681
6682 if (!dom_info_available_p (CDI_DOMINATORS))
6683 {
6684 remove_edge (e);
6685 return;
6686 }
6687
6688 /* No updating is needed for edges to exit. */
6689 if (e->dest == EXIT_BLOCK_PTR)
6690 {
6691 if (cfgcleanup_altered_bbs)
6692 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6693 remove_edge (e);
6694 return;
6695 }
6696
6697 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6698 that is not dominated by E->dest, then this set is empty. Otherwise,
6699 all the basic blocks dominated by E->dest are removed.
6700
6701 Also, to DF_IDOM we store the immediate dominators of the blocks in
6702 the dominance frontier of E (i.e., of the successors of the
6703 removed blocks, if there are any, and of E->dest otherwise). */
6704 FOR_EACH_EDGE (f, ei, e->dest->preds)
6705 {
6706 if (f == e)
6707 continue;
6708
6709 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6710 {
6711 none_removed = true;
6712 break;
6713 }
6714 }
6715
6716 df = BITMAP_ALLOC (NULL);
6717 df_idom = BITMAP_ALLOC (NULL);
6718
6719 if (none_removed)
6720 bitmap_set_bit (df_idom,
6721 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6722 else
6723 {
6724 get_all_dominated_blocks (e->dest, &bbs_to_remove);
6725 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6726 {
6727 FOR_EACH_EDGE (f, ei, bb->succs)
6728 {
6729 if (f->dest != EXIT_BLOCK_PTR)
6730 bitmap_set_bit (df, f->dest->index);
6731 }
6732 }
6733 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6734 bitmap_clear_bit (df, bb->index);
6735
6736 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6737 {
6738 bb = BASIC_BLOCK (i);
6739 bitmap_set_bit (df_idom,
6740 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6741 }
6742 }
6743
6744 if (cfgcleanup_altered_bbs)
6745 {
6746 /* Record the set of the altered basic blocks. */
6747 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6748 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6749 }
6750
6751 /* Remove E and the cancelled blocks. */
6752 if (none_removed)
6753 remove_edge (e);
6754 else
6755 {
6756 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6757 delete_basic_block (bb);
6758 }
6759
6760 /* Update the dominance information. The immediate dominator may change only
6761 for blocks whose immediate dominator belongs to DF_IDOM:
6762
6763 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6764 removal. Let Z be an arbitrary block such that idom(Z) = Y and
6765 Z dominates X after the removal. Before removal, there exists a path P
6766 from Y to X that avoids Z. Let F be the last edge on P that is
6767 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6768 dominates W, and because of P, Z does not dominate W), and W belongs to
6769 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6770 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6771 {
6772 bb = BASIC_BLOCK (i);
6773 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6774 dbb;
6775 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6776 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6777 }
6778
6779 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6780
6781 BITMAP_FREE (df);
6782 BITMAP_FREE (df_idom);
6783 VEC_free (basic_block, heap, bbs_to_remove);
6784 VEC_free (basic_block, heap, bbs_to_fix_dom);
6785 }
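
/* Illustrative example for the update above: in the diamond

	  A
	 / \
	B   C
	 \ /
	  D

   removing edge A->B deletes B (A was its only predecessor), the
   dominance frontier of the removed block is {D}, so DF_IDOM contains
   idom(D) = A, and iterating over A's remaining dominator children
   recomputes idom(D) = C.  */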
6786
6787 /* Purge dead EH edges from basic block BB. */
6788
6789 bool
6790 gimple_purge_dead_eh_edges (basic_block bb)
6791 {
6792 bool changed = false;
6793 edge e;
6794 edge_iterator ei;
6795 gimple stmt = last_stmt (bb);
6796
6797 if (stmt && stmt_can_throw_internal (stmt))
6798 return false;
6799
6800 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6801 {
6802 if (e->flags & EDGE_EH)
6803 {
6804 remove_edge_and_dominated_blocks (e);
6805 changed = true;
6806 }
6807 else
6808 ei_next (&ei);
6809 }
6810
6811 return changed;
6812 }
6813
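/* Purge dead EH edges from every basic block in BLOCKS. Return true
   if any edge was removed. */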
6814 bool
6815 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6816 {
6817 bool changed = false;
6818 unsigned i;
6819 bitmap_iterator bi;
6820
6821 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6822 {
6823 basic_block bb = BASIC_BLOCK (i);
6824
6825 /* Earlier gimple_purge_dead_eh_edges could have removed
6826 this basic block already. */
6827 gcc_assert (bb || changed);
6828 if (bb != NULL)
6829 changed |= gimple_purge_dead_eh_edges (bb);
6830 }
6831
6832 return changed;
6833 }
6834
6835 /* This function is called whenever a new edge is created or
6836 redirected. */
6837
6838 static void
6839 gimple_execute_on_growing_pred (edge e)
6840 {
6841 basic_block bb = e->dest;
6842
6843 if (phi_nodes (bb))
6844 reserve_phi_args_for_new_edge (bb);
6845 }
6846
6847 /* This function is called immediately before edge E is removed from
6848 the edge vector E->dest->preds. */
6849
6850 static void
6851 gimple_execute_on_shrinking_pred (edge e)
6852 {
6853 if (phi_nodes (e->dest))
6854 remove_phi_args (e);
6855 }
6856
6857 /*---------------------------------------------------------------------------
6858 Helper functions for Loop versioning
6859 ---------------------------------------------------------------------------*/
6860
6861 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
6862 of 'first'. Both of them are dominated by 'new_head' basic block. When
6863 'new_head' was created by splitting 'second's incoming edge, it received
6864 phi arguments on that edge from split_edge(). Later, an additional edge
6865 'e' was created to connect 'new_head' and 'first'. Now this routine adds
6866 to that additional edge 'e' the phi args that the edge from 'new_head' to
6867 'second' received as part of the edge splitting. */
6868
6869 static void
6870 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
6871 basic_block new_head, edge e)
6872 {
6873 gimple phi1, phi2;
6874 gimple_stmt_iterator psi1, psi2;
6875 tree def;
6876 edge e2 = find_edge (new_head, second);
6877
6878 /* Because NEW_HEAD has been created by splitting SECOND's incoming
6879 edge, we should always have an edge from NEW_HEAD to SECOND. */
6880 gcc_assert (e2 != NULL);
6881
6882 /* Walk all phi nodes of basic block 'second' and add matching phi args
6883 to edge 'e' of head 'first'. PHI args are always in the correct order. */
6884
6885 for (psi2 = gsi_start_phis (second),
6886 psi1 = gsi_start_phis (first);
6887 !gsi_end_p (psi2) && !gsi_end_p (psi1);
6888 gsi_next (&psi2), gsi_next (&psi1))
6889 {
6890 phi1 = gsi_stmt (psi1);
6891 phi2 = gsi_stmt (psi2);
6892 def = PHI_ARG_DEF (phi2, e2->dest_idx);
6893 add_phi_arg (phi1, def, e);
6894 }
6895 }
6896
6897
6898 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
6899 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
6900 the destination of the ELSE part. */
6901
6902 static void
6903 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
6904 basic_block second_head ATTRIBUTE_UNUSED,
6905 basic_block cond_bb, void *cond_e)
6906 {
6907 gimple_stmt_iterator gsi;
6908 gimple new_cond_expr;
6909 tree cond_expr = (tree) cond_e;
6910 edge e0;
6911
6912 /* Build the new conditional expression. */
6913 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
6914 NULL_TREE, NULL_TREE);
6915
6916 /* Add new cond in cond_bb. */
6917 gsi = gsi_last_bb (cond_bb);
6918 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
6919
6920 /* Adjust edges appropriately to connect new head with first head
6921 as well as second head. */
6922 e0 = single_succ_edge (cond_bb);
6923 e0->flags &= ~EDGE_FALLTHRU;
6924 e0->flags |= EDGE_FALSE_VALUE;
6925 }
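
/* Illustrative picture of the shape the two helpers above maintain
   (the true edge itself is created by the caller, e.g. the generic
   loop_version machinery):

	       cond_bb
	      /       \
	  (true)     (false)
	second_head  first_head

   The code above turns COND_BB's former fallthru edge into the false
   edge; the phi adjustment keeps the arguments on the new edge into
   FIRST_HEAD consistent with those SECOND_HEAD received when its
   incoming edge was split.  */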
6926
6927 struct cfg_hooks gimple_cfg_hooks = {
6928 "gimple",
6929 gimple_verify_flow_info,
6930 gimple_dump_bb, /* dump_bb */
6931 create_bb, /* create_basic_block */
6932 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
6933 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
6934 gimple_can_remove_branch_p, /* can_remove_branch_p */
6935 remove_bb, /* delete_basic_block */
6936 gimple_split_block, /* split_block */
6937 gimple_move_block_after, /* move_block_after */
6938 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
6939 gimple_merge_blocks, /* merge_blocks */
6940 gimple_predict_edge, /* predict_edge */
6941 gimple_predicted_by_p, /* predicted_by_p */
6942 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
6943 gimple_duplicate_bb, /* duplicate_block */
6944 gimple_split_edge, /* split_edge */
6945 gimple_make_forwarder_block, /* make_forwarder_block */
6946 NULL, /* tidy_fallthru_edge */
6947 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
6948 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
6949 gimple_flow_call_edges_add, /* flow_call_edges_add */
6950 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
6951 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
6952 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
6953 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
6954 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
6955 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
6956 flush_pending_stmts /* flush_pending_stmts */
6957 };
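
/* These hooks are installed by gimple_register_cfg_hooks (see
   cfghooks.c); after that, the generic CFG routines dispatch here.
   For example (illustrative):

	gimple_register_cfg_hooks ();
	e = split_block (bb, stmt);	... dispatches to gimple_split_block
	new_bb = split_edge (e);	... dispatches to gimple_split_edge

   so generic code need not call the gimple_* routines directly.  */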
6958
6959
6960 /* Split all critical edges. */
6961
6962 static unsigned int
6963 split_critical_edges (void)
6964 {
6965 basic_block bb;
6966 edge e;
6967 edge_iterator ei;
6968
6969 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
6970 expensive. So we want to enable recording of edge-to-CASE_LABEL_EXPR
6971 mappings around the calls to split_edge. */
6972 start_recording_case_labels ();
6973 FOR_ALL_BB (bb)
6974 {
6975 FOR_EACH_EDGE (e, ei, bb->succs)
6976 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
6977 split_edge (e);
6980 }
6981 end_recording_case_labels ();
6982 return 0;
6983 }
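
/* An edge is critical when its source has more than one successor and
   its destination has more than one predecessor, as B->D below:

	A   B   E
	 \ / \ /
	  C   D

   Splitting such an edge inserts a new empty block on it, so that
   code can later be placed on the edge without affecting any other
   path.  */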
6984
6985 struct gimple_opt_pass pass_split_crit_edges =
6986 {
6987 {
6988 GIMPLE_PASS,
6989 "crited", /* name */
6990 NULL, /* gate */
6991 split_critical_edges, /* execute */
6992 NULL, /* sub */
6993 NULL, /* next */
6994 0, /* static_pass_number */
6995 TV_TREE_SPLIT_EDGES, /* tv_id */
6996 PROP_cfg, /* properties_required */
6997 PROP_no_crit_edges, /* properties_provided */
6998 0, /* properties_destroyed */
6999 0, /* todo_flags_start */
7000 TODO_dump_func /* todo_flags_finish */
7001 }
7002 };
7003
7004
7005 /* Build a ternary operation and gimplify it. Emit code before GSI.
7006 Return the gimple_val holding the result. */
7007
7008 tree
7009 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7010 tree type, tree a, tree b, tree c)
7011 {
7012 tree ret;
7013
7014 ret = fold_build3 (code, type, a, b, c);
7015 STRIP_NOPS (ret);
7016
7017 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7018 GSI_SAME_STMT);
7019 }
7020
7021 /* Build a binary operation and gimplify it. Emit code before GSI.
7022 Return the gimple_val holding the result. */
7023
7024 tree
7025 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7026 tree type, tree a, tree b)
7027 {
7028 tree ret;
7029
7030 ret = fold_build2 (code, type, a, b);
7031 STRIP_NOPS (ret);
7032
7033 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7034 GSI_SAME_STMT);
7035 }
7036
7037 /* Build a unary operation and gimplify it. Emit code before GSI.
7038 Return the gimple_val holding the result. */
7039
7040 tree
7041 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7042 tree a)
7043 {
7044 tree ret;
7045
7046 ret = fold_build1 (code, type, a);
7047 STRIP_NOPS (ret);
7048
7049 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7050 GSI_SAME_STMT);
7051 }
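
/* Usage sketch for the helpers above (illustrative; TYPE, A, B and C
   are assumed to be existing trees and GSI a valid iterator):

	tree t = gimplify_build2 (&gsi, MULT_EXPR, type, b, c);
	t = gimplify_build2 (&gsi, PLUS_EXPR, type, a, t);

   Each call folds the expression, emits any statements needed to
   gimplify it before GSI, and returns a gimple value holding the
   result.  */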
7052
7053
7054 \f
7055 /* Emit return warnings. */
7056
7057 static unsigned int
7058 execute_warn_function_return (void)
7059 {
7060 source_location location;
7061 gimple last;
7062 edge e;
7063 edge_iterator ei;
7064
7065 /* If we have a path to EXIT, then we do return. */
7066 if (TREE_THIS_VOLATILE (cfun->decl)
7067 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7068 {
7069 location = UNKNOWN_LOCATION;
7070 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7071 {
7072 last = last_stmt (e->src);
7073 if (gimple_code (last) == GIMPLE_RETURN
7074 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7075 break;
7076 }
7077 if (location == UNKNOWN_LOCATION)
7078 location = cfun->function_end_locus;
7079 warning (0, "%H%<noreturn%> function does return", &location);
7080 }
7081
7082 /* If we see "return;" in some basic block, then we do reach the end
7083 without returning a value. */
7084 else if (warn_return_type
7085 && !TREE_NO_WARNING (cfun->decl)
7086 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7087 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7088 {
7089 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7090 {
7091 gimple last = last_stmt (e->src);
7092 if (gimple_code (last) == GIMPLE_RETURN
7093 && gimple_return_retval (last) == NULL
7094 && !gimple_no_warning_p (last))
7095 {
7096 location = gimple_location (last);
7097 if (location == UNKNOWN_LOCATION)
7098 location = cfun->function_end_locus;
7099 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7100 TREE_NO_WARNING (cfun->decl) = 1;
7101 break;
7102 }
7103 }
7104 }
7105 return 0;
7106 }
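
/* For example (illustrative), compiling

	int f (int x) { if (x) return 1; }

   with -Wreturn-type reaches the second branch above through the
   implicit valueless return on the fall-through path, producing
   "control reaches end of non-void function".  */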
7107
7108
7109 /* Given a basic block B which ends with a conditional and has
7110 precisely two successors, determine which of the edges is taken if
7111 the conditional is true and which is taken if the conditional is
7112 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7113
7114 void
7115 extract_true_false_edges_from_block (basic_block b,
7116 edge *true_edge,
7117 edge *false_edge)
7118 {
7119 edge e = EDGE_SUCC (b, 0);
7120
7121 if (e->flags & EDGE_TRUE_VALUE)
7122 {
7123 *true_edge = e;
7124 *false_edge = EDGE_SUCC (b, 1);
7125 }
7126 else
7127 {
7128 *false_edge = e;
7129 *true_edge = EDGE_SUCC (b, 1);
7130 }
7131 }
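
/* Usage sketch (illustrative): given a block BB that ends in a
   GIMPLE_COND,

	edge true_edge, false_edge;
	extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

   yields the successors taken when the condition is true and false,
   respectively.  */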
7132
7133 struct gimple_opt_pass pass_warn_function_return =
7134 {
7135 {
7136 GIMPLE_PASS,
7137 NULL, /* name */
7138 NULL, /* gate */
7139 execute_warn_function_return, /* execute */
7140 NULL, /* sub */
7141 NULL, /* next */
7142 0, /* static_pass_number */
7143 0, /* tv_id */
7144 PROP_cfg, /* properties_required */
7145 0, /* properties_provided */
7146 0, /* properties_destroyed */
7147 0, /* todo_flags_start */
7148 0 /* todo_flags_finish */
7149 }
7150 };
7151
7152 /* Emit noreturn warnings. */
7153
7154 static unsigned int
7155 execute_warn_function_noreturn (void)
7156 {
7157 if (warn_missing_noreturn
7158 && !TREE_THIS_VOLATILE (cfun->decl)
7159 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0
7160 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
7161 warning (OPT_Wmissing_noreturn, "%Jfunction might be possible candidate "
7162 "for attribute %<noreturn%>",
7163 cfun->decl);
7164 return 0;
7165 }
7166
7167 struct gimple_opt_pass pass_warn_function_noreturn =
7168 {
7169 {
7170 GIMPLE_PASS,
7171 NULL, /* name */
7172 NULL, /* gate */
7173 execute_warn_function_noreturn, /* execute */
7174 NULL, /* sub */
7175 NULL, /* next */
7176 0, /* static_pass_number */
7177 0, /* tv_id */
7178 PROP_cfg, /* properties_required */
7179 0, /* properties_provided */
7180 0, /* properties_destroyed */
7181 0, /* todo_flags_start */
7182 0 /* todo_flags_finish */
7183 }
7184 };