gcc/tree-cfg.c
/* Control flow functions for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "toplev.h"
#include "except.h"
#include "cfgloop.h"
#include "cfglayout.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their TREE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
static htab_t discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static unsigned int locus_map_hash (const void *);
static int locus_map_eq (const void *, const void *);
static void assign_discriminator (location_t, basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static void gimple_cfg2vcg (FILE *);
static gimple first_non_label_stmt (basic_block);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);
static void group_case_labels_stmt (gimple);

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  basic_block_info_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  label_to_block_map_for_function (fn)
    = VEC_alloc (basic_block, gc, initial_cfg_capacity);
  VEC_safe_grow_cleared (basic_block, gc,
                         label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
    VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = htab_create (13, locus_map_hash, locus_map_eq,
                                         free);
  make_edges ();
  cleanup_dead_labels ();
  htab_delete (discriminator_per_locus);

  /* Debugging dumps.  */

  /* Write the flowgraph to a VCG file.  */
  {
    int local_dump_flags;
    FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
    if (vcg_file)
      {
        gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
      }
  }

#ifdef ENABLE_CHECKING
  verify_stmts ();
#endif
}

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  return 0;
}

struct gimple_opt_pass pass_build_cfg =
{
 {
  GIMPLE_PASS,
  "cfg",                                /* name */
  NULL,                                 /* gate */
  execute_build_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_CFG,                          /* tv_id */
  PROP_gimple_leh,                      /* properties_required */
  PROP_cfg,                             /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_stmts | TODO_cleanup_cfg
  | TODO_dump_func                      /* todo_flags_finish */
 }
};


/* Return true if T is a computed goto.  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
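
/* For example, with GNU C's labels-as-values extension:

     void *p = &&lab;
     goto *p;          <- computed goto: destination is not a LABEL_DECL
     goto lab;         <- plain goto: destination is the LABEL_DECL "lab"

   only the first goto satisfies computed_goto_p.  (Illustrative sketch;
   "lab" and "p" are hypothetical names.)  */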


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for its goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}
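
/* Illustrative sketch of the transformation performed above: a function
   containing

     goto *p1_1;   ...   goto *p2_2;

   becomes, after factoring,

     gotovar = p1_1; goto factored;   ...   gotovar = p2_2; goto factored;
     factored: goto *gotovar;

   so only the single factored block needs an edge to every potential
   destination label.  (Names here are hypothetical.)  */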


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            seq = gsi_split_seq_before (&i);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}
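
/* A sketch of the LHS replacement above: for a returns-twice call such as

     x = setjmp (buf);

   the statement is rewritten as

     tmp = setjmp (buf);
     x = tmp;

   so the old value of x is still available on the abnormal edge out of
   the call's block and the SSA life ranges do not overlap.  */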


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  bb->il.gimple = ggc_alloc_cleared_gimple_bb_info ();
  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
    {
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;

          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt),
                                  boolean_type_node,
                                  gimple_cond_lhs (stmt),
                                  gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}
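
/* For instance, a condition gimplification left as "if (1 != 0)" folds
   to a nonzero constant, so the GIMPLE_COND is rewritten by
   gimple_cond_make_true and the now-dead false edge can later be removed
   by CFG cleanup.  */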

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILTIN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     succs edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        {
          make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
          if (last)
            assign_discriminator (gimple_location (last), bb->next_bb);
        }
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

static unsigned int
locus_map_hash (const void *item)
{
  return ((const struct locus_discrim_map *) item)->locus;
}

/* Equality function for the locus-to-discriminator map.  VA and VB
   point to the two hash table entries to compare.  */

static int
locus_map_eq (const void *va, const void *vb)
{
  const struct locus_discrim_map *a = (const struct locus_discrim_map *) va;
  const struct locus_discrim_map *b = (const struct locus_discrim_map *) vb;
  return a->locus == b->locus;
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = (struct locus_discrim_map **)
    htab_find_slot_with_hash (discriminator_per_locus, (void *) &item,
                              (hashval_t) locus, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && strcmp (from.file, to.file) == 0);
}

/* Assign a unique discriminator value to block BB if it begins at the same
   LOCUS as its predecessor block.  */

static void
assign_discriminator (location_t locus, basic_block bb)
{
  gimple first_in_to_bb, last_in_to_bb;

  if (locus == 0 || bb->discriminator != 0)
    return;

  first_in_to_bb = first_non_label_stmt (bb);
  last_in_to_bb = last_stmt (bb);
  if ((first_in_to_bb && same_line_p (locus, gimple_location (first_in_to_bb)))
      || (last_in_to_bb && same_line_p (locus, gimple_location (last_in_to_bb))))
    bb->discriminator = next_discriminator_for_locus (locus);
}
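
/* As an example, a loop written on a single source line, say

     for (i = 0; i < n; i++) sum += a[i];

   yields several basic blocks that all share one locus; giving the later
   blocks fresh discriminators lets a sample-based profiler attribute
   counts to the header, body and latch separately.  */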

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;
  location_t entry_locus;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  entry_locus = gimple_location (entry);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  assign_discriminator (entry_locus, then_bb);
  e->goto_locus = gimple_location (then_stmt);
  if (e->goto_locus)
    e->goto_block = gimple_block (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    {
      assign_discriminator (entry_locus, else_bb);
      e->goto_locus = gimple_location (else_stmt);
      if (e->goto_locus)
        e->goto_block = gimple_block (else_stmt);
    }

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
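
/* In GIMPLE dump form the transformation above turns

     if (a_1 > b_2) goto <D.1234>; else goto <D.1235>;

   into the same statement with both labels cleared, plus an
   EDGE_TRUE_VALUE edge to the block of <D.1234> and an EDGE_FALSE_VALUE
   edge to the block of <D.1235>.  (Label names are illustrative.)  */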


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = TREE_CHAIN (t);
      TREE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return false;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      TREE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  location_t entry_locus;
  size_t i, n;

  entry_locus = gimple_location (entry);

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
      assign_discriminator (entry_locus, label_bb);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
      <= (unsigned int) uid)
    return NULL;
  return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple label_stmt = gsi_stmt (gsi);
        tree target;

        if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;

        target = gimple_label_label (label_stmt);

        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
        if ((FORCED_LABEL (target) && !for_call)
            || (DECL_NONLOCAL (target) && for_call))
          {
            make_edge (bb, target_bb, EDGE_ABNORMAL);
            break;
          }
      }
}
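
/* For example, with a GNU C nested function performing a non-local goto:

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       g ();
       out:;
     }

   the label "out" is DECL_NONLOCAL, so each call site that can make an
   abnormal goto receives an abnormal edge to out's block.  (Sketch;
   names are hypothetical.)  */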

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      assign_discriminator (e->goto_locus, label_bb);
      if (e->goto_locus)
        e->goto_block = gimple_block (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  location_t stmt_loc = gimple_location (stmt);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
      assign_discriminator (stmt_loc, label_bb);
    }
}
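
/* For instance, for the GNU "asm goto" extension

     asm goto ("..." : : : : err);

   an edge is created from this block to the block holding the label
   "err", in addition to the fallthru edge added by make_edges.
   (Sketch; "err" is a hypothetical label.)  */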

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

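/* For instance, for a block that begins

     L1: L2: L3: x_1 = 1;

   where only L2 is user-defined, L2 is chosen as the leading label,
   references to L1 and L3 are redirected to L2, and the unused
   artificial labels are then deleted.  (Illustrative example.)  */
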
/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
   1) Find the leading label for each block.
   2) Redirect all references to labels to the leading labels.
   3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          {
            tree true_label = gimple_cond_true_label (stmt);
            tree false_label = gimple_cond_false_label (stmt);

            if (true_label)
              gimple_cond_set_true_label (stmt, main_block_label (true_label));
            if (false_label)
              gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                tree label = main_block_label (CASE_LABEL (case_label));
                CASE_LABEL (case_label) = label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              tree new_dest = main_block_label (gimple_goto_dest (stmt));
              gimple_goto_set_dest (stmt, new_dest);
            }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

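/* A concrete sketch of the merging: for

     switch (x_1) { case 1: case 2: case 3: goto L; default: goto M; }

   the three consecutive cases share one destination, so they collapse
   into the single range entry "case 1 ... 3: goto L;".  (Names are
   illustrative.)  */
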
static void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  tree default_case = NULL_TREE;
  tree default_label = NULL_TREE;
  bool has_default;

  /* The default label is always the first case in a switch
     statement after gimplification, if it was not optimized
     away.  */
  if (!CASE_LOW (gimple_switch_default_label (stmt))
      && !CASE_HIGH (gimple_switch_default_label (stmt)))
    {
      default_case = gimple_switch_default_label (stmt);
      default_label = CASE_LABEL (default_case);
      has_default = true;
    }
  else
    has_default = false;

  /* Look for possible opportunities to merge cases.  */
  if (has_default)
    i = 1;
  else
    i = 0;
  while (i < old_size)
    {
      tree base_case, base_label, base_high;
      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_label = CASE_LABEL (base_case);

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_label == default_label)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
        ? CASE_HIGH (base_case)
        : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          tree merge_label = CASE_LABEL (merge_case);
          tree t = int_const_binop (PLUS_EXPR, base_high,
                                    integer_one_node, 1);

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_label == base_label
              && tree_int_cst_equal (CASE_LOW (merge_case), t))
            {
              base_high = CASE_HIGH (merge_case) ?
                CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;
  gimple_seq phis;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user labels.  */
      if (!DECL_ARTIFICIAL (lab))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  phis = phi_nodes (b);
  if (!gimple_seq_empty_p (phis)
      && name_mappings_registered_p ())
    return false;

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has no
   nondebug uses.  */
bool
has_zero_uses_1 (const ssa_use_operand_t *head)
{
  const ssa_use_operand_t *ptr;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      return false;

  return true;
}

/* Return true if the var whose chain of uses starts at HEAD has a
   single nondebug use.  Set USE_P and STMT to that single nondebug
   use, if so, or to NULL otherwise.  */
bool
single_imm_use_1 (const ssa_use_operand_t *head,
                  use_operand_p *use_p, gimple *stmt)
{
  ssa_use_operand_t *ptr, *single_use = 0;

  for (ptr = head->next; ptr != head; ptr = ptr->next)
    if (!is_gimple_debug (USE_STMT (ptr)))
      {
        if (single_use)
          {
            single_use = NULL;
            break;
          }
        single_use = ptr;
      }

  if (use_p)
    *use_p = single_use;

  if (stmt)
    *stmt = single_use ? single_use->loc.stmt : NULL;

  return !!single_use;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
                     would prevent replacement.  */
                  gcc_assert (!is_gimple_reg (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          size_t i;

          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  This should go in update_stmt.  */
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              tree op = gimple_op (stmt, i);
              /* Operands may be empty here.  For example, the labels
                 of a GIMPLE_COND are nulled out following the creation
                 of the corresponding CFG edges.  */
              if (op && TREE_CODE (op) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (op);
            }

          maybe_clean_or_replace_eh_stmt (stmt, stmt);
          update_stmt (stmt);
        }
    }

  gcc_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;
  gimple_seq phis = phi_nodes (b);

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start (phis); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = !is_gimple_reg (def)
                              || may_propagate_copy (def, use);

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && is_gimple_reg (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (is_gimple_reg (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (!is_gimple_reg (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of the two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}
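
/* For instance, a block ending in a call that may throw has a normal
   fallthru successor plus an EDGE_EH (hence EDGE_COMPLEX) successor;
   single_noncomplex_succ returns the fallthru destination, which
   post-dominator based heuristics can treat as the real continuation.  */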

/* CALL is a call statement.  Set current_function_calls_* flags.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (bb, dump_file, 0);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }
1914
1915 gcc_unreachable ();
1916 }
1917
1918 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1919 statement, determine which of the outgoing edges will be taken out of the
1920 block. Return NULL if either edge may be taken. */
1921
1922 static edge
1923 find_taken_edge_computed_goto (basic_block bb, tree val)
1924 {
1925 basic_block dest;
1926 edge e = NULL;
1927
1928 dest = label_to_block (val);
1929 if (dest)
1930 {
1931 e = find_edge (bb, dest);
1932 gcc_assert (e != NULL);
1933 }
1934
1935 return e;
1936 }
1937
1938 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1939 statement, determine which of the two edges will be taken out of the
1940 block. Return NULL if either edge may be taken. */
1941
1942 static edge
1943 find_taken_edge_cond_expr (basic_block bb, tree val)
1944 {
1945 edge true_edge, false_edge;
1946
1947 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1948
1949 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1950 return (integer_zerop (val) ? false_edge : true_edge);
1951 }
1952
1953 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1954 statement, determine which edge will be taken out of the block; a
1955 matching case label (or the default) always exists. */
1956
1957 static edge
1958 find_taken_edge_switch_expr (basic_block bb, tree val)
1959 {
1960 basic_block dest_bb;
1961 edge e;
1962 gimple switch_stmt;
1963 tree taken_case;
1964
1965 switch_stmt = last_stmt (bb);
1966 taken_case = find_case_label_for_value (switch_stmt, val);
1967 dest_bb = label_to_block (CASE_LABEL (taken_case));
1968
1969 e = find_edge (bb, dest_bb);
1970 gcc_assert (e);
1971 return e;
1972 }
1973
1974
1975 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
1976 We can make optimal use here of the fact that the case labels are
1977 sorted: We can do a binary search for a case matching VAL. */
1978
1979 static tree
1980 find_case_label_for_value (gimple switch_stmt, tree val)
1981 {
1982 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
1983 tree default_case = gimple_switch_default_label (switch_stmt);
1984
1985 for (low = 0, high = n; high - low > 1; )
1986 {
1987 size_t i = (high + low) / 2;
1988 tree t = gimple_switch_label (switch_stmt, i);
1989 int cmp;
1990
1991 /* Cache the result of comparing CASE_LOW and val. */
1992 cmp = tree_int_cst_compare (CASE_LOW (t), val);
1993
1994 if (cmp > 0)
1995 high = i;
1996 else
1997 low = i;
1998
1999 if (CASE_HIGH (t) == NULL)
2000 {
2001 /* A single-valued case label. */
2002 if (cmp == 0)
2003 return t;
2004 }
2005 else
2006 {
2007 /* A case range. We can only handle integer ranges. */
2008 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2009 return t;
2010 }
2011 }
2012
2013 return default_case;
2014 }
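
/* Worked example (illustrative): for a switch whose non-default
   labels have sorted CASE_LOWs { 1, 4, 9 } at indices 1..3 and
   VAL == 5, the search starts with [low, high) = [0, 4): it probes
   index 2 (CASE_LOW 4, cmp < 0, so low = 2), then index 3
   (CASE_LOW 9, cmp > 0, so high = 3).  The loop exits with
   high - low == 1 without a match, so the default label is
   returned; with VAL == 4 the first probe would have matched.  */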
2015
2016
2017 /* Dump a basic block on stderr. */
2018
2019 void
2020 gimple_debug_bb (basic_block bb)
2021 {
2022 gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
2023 }
2024
2025
2026 /* Dump basic block with index N on stderr. */
2027
2028 basic_block
2029 gimple_debug_bb_n (int n)
2030 {
2031 gimple_debug_bb (BASIC_BLOCK (n));
2032 return BASIC_BLOCK (n);
2033 }
2034
2035
2036 /* Dump the CFG on stderr.
2037
2038 FLAGS are the same as those used by the tree dumping functions
2039 (see TDF_* in tree-pass.h). */
2040
2041 void
2042 gimple_debug_cfg (int flags)
2043 {
2044 gimple_dump_cfg (stderr, flags);
2045 }
2046
2047
2048 /* Dump the program showing basic block boundaries on the given FILE.
2049
2050 FLAGS are the same as those used by the tree dumping functions (see TDF_*
2051 in tree-pass.h). */
2052
2053 void
2054 gimple_dump_cfg (FILE *file, int flags)
2055 {
2056 if (flags & TDF_DETAILS)
2057 {
2058 const char *funcname
2059 = lang_hooks.decl_printable_name (current_function_decl, 2);
2060
2061 fputc ('\n', file);
2062 fprintf (file, ";; Function %s\n\n", funcname);
2063 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2064 n_basic_blocks, n_edges, last_basic_block);
2065
2066 brief_dump_cfg (file);
2067 fprintf (file, "\n");
2068 }
2069
2070 if (flags & TDF_STATS)
2071 dump_cfg_stats (file);
2072
2073 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2074 }
2075
2076
2077 /* Dump CFG statistics on FILE. */
2078
2079 void
2080 dump_cfg_stats (FILE *file)
2081 {
2082 static long max_num_merged_labels = 0;
2083 unsigned long size, total = 0;
2084 long num_edges;
2085 basic_block bb;
2086 const char * const fmt_str = "%-30s%-13s%12s\n";
2087 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2088 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2089 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2090 const char *funcname
2091 = lang_hooks.decl_printable_name (current_function_decl, 2);
2092
2093
2094 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2095
2096 fprintf (file, "---------------------------------------------------------\n");
2097 fprintf (file, fmt_str, "", " Number of ", "Memory");
2098 fprintf (file, fmt_str, "", " instances ", "used ");
2099 fprintf (file, "---------------------------------------------------------\n");
2100
2101 size = n_basic_blocks * sizeof (struct basic_block_def);
2102 total += size;
2103 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2104 SCALE (size), LABEL (size));
2105
2106 num_edges = 0;
2107 FOR_EACH_BB (bb)
2108 num_edges += EDGE_COUNT (bb->succs);
2109 size = num_edges * sizeof (struct edge_def);
2110 total += size;
2111 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2112
2113 fprintf (file, "---------------------------------------------------------\n");
2114 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2115 LABEL (total));
2116 fprintf (file, "---------------------------------------------------------\n");
2117 fprintf (file, "\n");
2118
2119 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2120 max_num_merged_labels = cfg_stats.num_merged_labels;
2121
2122 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2123 cfg_stats.num_merged_labels, max_num_merged_labels);
2124
2125 fprintf (file, "\n");
2126 }
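
/* Abridged sample output (illustrative; the instance counts and
   sizes are function-, host- and target-dependent):

     CFG Statistics for foo

     ---------------------------------------------------------
                                    Number of    Memory
                                    instances    used
     ---------------------------------------------------------
     Basic blocks                           5         980b
     Edges                                  6         336b
     ---------------------------------------------------------
     Total memory used by CFG data                   1316b
     ---------------------------------------------------------

     Coalesced label blocks: 0 (Max so far: 0)  */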
2127
2128
2129 /* Dump CFG statistics on stderr. Keep extern so that it's always
2130 linked in the final executable. */
2131
2132 DEBUG_FUNCTION void
2133 debug_cfg_stats (void)
2134 {
2135 dump_cfg_stats (stderr);
2136 }
2137
2138
2139 /* Dump the flowgraph to a .vcg FILE. */
2140
2141 static void
2142 gimple_cfg2vcg (FILE *file)
2143 {
2144 edge e;
2145 edge_iterator ei;
2146 basic_block bb;
2147 const char *funcname
2148 = lang_hooks.decl_printable_name (current_function_decl, 2);
2149
2150 /* Write the file header. */
2151 fprintf (file, "graph: { title: \"%s\"\n", funcname);
2152 fprintf (file, "node: { title: \"ENTRY\" label: \"ENTRY\" }\n");
2153 fprintf (file, "node: { title: \"EXIT\" label: \"EXIT\" }\n");
2154
2155 /* Write blocks and edges. */
2156 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2157 {
2158 fprintf (file, "edge: { sourcename: \"ENTRY\" targetname: \"%d\"",
2159 e->dest->index);
2160
2161 if (e->flags & EDGE_FAKE)
2162 fprintf (file, " linestyle: dotted priority: 10");
2163 else
2164 fprintf (file, " linestyle: solid priority: 100");
2165
2166 fprintf (file, " }\n");
2167 }
2168 fputc ('\n', file);
2169
2170 FOR_EACH_BB (bb)
2171 {
2172 enum gimple_code head_code, end_code;
2173 const char *head_name, *end_name;
2174 int head_line = 0;
2175 int end_line = 0;
2176 gimple first = first_stmt (bb);
2177 gimple last = last_stmt (bb);
2178
2179 if (first)
2180 {
2181 head_code = gimple_code (first);
2182 head_name = gimple_code_name[head_code];
2183 head_line = get_lineno (first);
2184 }
2185 else
2186 head_name = "no-statement";
2187
2188 if (last)
2189 {
2190 end_code = gimple_code (last);
2191 end_name = gimple_code_name[end_code];
2192 end_line = get_lineno (last);
2193 }
2194 else
2195 end_name = "no-statement";
2196
2197 fprintf (file, "node: { title: \"%d\" label: \"#%d\\n%s (%d)\\n%s (%d)\"}\n",
2198 bb->index, bb->index, head_name, head_line, end_name,
2199 end_line);
2200
2201 FOR_EACH_EDGE (e, ei, bb->succs)
2202 {
2203 if (e->dest == EXIT_BLOCK_PTR)
2204 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"EXIT\"", bb->index);
2205 else
2206 fprintf (file, "edge: { sourcename: \"%d\" targetname: \"%d\"", bb->index, e->dest->index);
2207
2208 if (e->flags & EDGE_FAKE)
2209 fprintf (file, " priority: 10 linestyle: dotted");
2210 else
2211 fprintf (file, " priority: 100 linestyle: solid");
2212
2213 fprintf (file, " }\n");
2214 }
2215
2216 if (bb->next_bb != EXIT_BLOCK_PTR)
2217 fputc ('\n', file);
2218 }
2219
2220 fputs ("}\n\n", file);
2221 }
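
/* Abridged sample of the emitted VCG text for a function "foo" with
   a single ordinary block (illustrative):

     graph: { title: "foo"
     node: { title: "ENTRY" label: "ENTRY" }
     node: { title: "EXIT" label: "EXIT" }
     edge: { sourcename: "ENTRY" targetname: "2" linestyle: solid priority: 100 }

     node: { title: "2" label: "#2\nGIMPLE_ASSIGN (3)\nGIMPLE_RETURN (4)"}
     edge: { sourcename: "2" targetname: "EXIT" priority: 100 linestyle: solid }
     }  */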
2222
2223
2224
2225 /*---------------------------------------------------------------------------
2226 Miscellaneous helpers
2227 ---------------------------------------------------------------------------*/
2228
2229 /* Return true if T represents a stmt that always transfers control. */
2230
2231 bool
2232 is_ctrl_stmt (gimple t)
2233 {
2234 switch (gimple_code (t))
2235 {
2236 case GIMPLE_COND:
2237 case GIMPLE_SWITCH:
2238 case GIMPLE_GOTO:
2239 case GIMPLE_RETURN:
2240 case GIMPLE_RESX:
2241 return true;
2242 default:
2243 return false;
2244 }
2245 }
2246
2247
2248 /* Return true if T is a statement that may alter the flow of control
2249 (e.g., a call to a non-returning function). */
2250
2251 bool
2252 is_ctrl_altering_stmt (gimple t)
2253 {
2254 gcc_assert (t);
2255
2256 switch (gimple_code (t))
2257 {
2258 case GIMPLE_CALL:
2259 {
2260 int flags = gimple_call_flags (t);
2261
2262 /* A non-pure/const call alters control flow if the current
2263 function has nonlocal labels. */
2264 if (!(flags & (ECF_CONST | ECF_PURE)) && cfun->has_nonlocal_label)
2265 return true;
2266
2267 /* A call also alters control flow if it does not return. */
2268 if (flags & ECF_NORETURN)
2269 return true;
2270
2271 /* A BUILT_IN_RETURN call is the same as a return statement. */
2272 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2273 return true;
2274 }
2275 break;
2276
2277 case GIMPLE_EH_DISPATCH:
2278 /* EH_DISPATCH branches to the individual catch handlers at
2279 this level of a try or allowed-exceptions region. It can
2280 fallthru to the next statement as well. */
2281 return true;
2282
2283 case GIMPLE_ASM:
2284 if (gimple_asm_nlabels (t) > 0)
2285 return true;
2286 break;
2287
2288 CASE_GIMPLE_OMP:
2289 /* OpenMP directives alter control flow. */
2290 return true;
2291
2292 default:
2293 break;
2294 }
2295
2296 /* If a statement can throw, it alters control flow. */
2297 return stmt_can_throw_internal (t);
2298 }
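
/* Example (illustrative): a call to "abort" is not a control
   statement, but its ECF_NORETURN flag makes it control-altering,
   so it must end its basic block; the same holds for any statement
   that can throw internally.  */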
2299
2300
2301 /* Return true if T is a simple local goto. */
2302
2303 bool
2304 simple_goto_p (gimple t)
2305 {
2306 return (gimple_code (t) == GIMPLE_GOTO
2307 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2308 }
2309
2310
2311 /* Return true if T can make an abnormal transfer of control flow.
2312 Transfers of control flow associated with EH are excluded. */
2313
2314 bool
2315 stmt_can_make_abnormal_goto (gimple t)
2316 {
2317 if (computed_goto_p (t))
2318 return true;
2319 if (is_gimple_call (t))
2320 return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
2321 return false;
2322 }
2323
2324
2325 /* Return true if STMT should start a new basic block. PREV_STMT is
2326 the statement preceding STMT. It is used when STMT is a label or a
2327 case label. Labels should only start a new basic block if their
2328 previous statement wasn't a label. Otherwise, sequences of labels
2329 would generate unnecessary basic blocks that contain only a single
2330 label. */
2331
2332 static inline bool
2333 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2334 {
2335 if (stmt == NULL)
2336 return false;
2337
2338 /* Labels start a new basic block only if the preceding statement
2339 wasn't a label of the same type. This prevents the creation of
2340 consecutive blocks that have nothing but a single label. */
2341 if (gimple_code (stmt) == GIMPLE_LABEL)
2342 {
2343 /* Nonlocal and computed GOTO targets always start a new block. */
2344 if (DECL_NONLOCAL (gimple_label_label (stmt))
2345 || FORCED_LABEL (gimple_label_label (stmt)))
2346 return true;
2347
2348 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2349 {
2350 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2351 return true;
2352
2353 cfg_stats.num_merged_labels++;
2354 return false;
2355 }
2356 else
2357 return true;
2358 }
2359
2360 return false;
2361 }
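
/* Example (illustrative): in the GIMPLE sequence

     L1:
     L2:
     x = 1;

   only L1 starts a new basic block; L2 is counted in
   cfg_stats.num_merged_labels and shares L1's block, unless one of
   the labels is nonlocal or has its address taken.  */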
2362
2363
2364 /* Return true if T should end a basic block. */
2365
2366 bool
2367 stmt_ends_bb_p (gimple t)
2368 {
2369 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2370 }
2371
2372 /* Remove block annotations and other data structures. */
2373
2374 void
2375 delete_tree_cfg_annotations (void)
2376 {
2377 label_to_block_map = NULL;
2378 }
2379
2380
2381 /* Return the first statement in basic block BB. */
2382
2383 gimple
2384 first_stmt (basic_block bb)
2385 {
2386 gimple_stmt_iterator i = gsi_start_bb (bb);
2387 gimple stmt = NULL;
2388
2389 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2390 {
2391 gsi_next (&i);
2392 stmt = NULL;
2393 }
2394 return stmt;
2395 }
2396
2397 /* Return the first non-label statement in basic block BB. */
2398
2399 static gimple
2400 first_non_label_stmt (basic_block bb)
2401 {
2402 gimple_stmt_iterator i = gsi_start_bb (bb);
2403 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2404 gsi_next (&i);
2405 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2406 }
2407
2408 /* Return the last statement in basic block BB. */
2409
2410 gimple
2411 last_stmt (basic_block bb)
2412 {
2413 gimple_stmt_iterator i = gsi_last_bb (bb);
2414 gimple stmt = NULL;
2415
2416 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2417 {
2418 gsi_prev (&i);
2419 stmt = NULL;
2420 }
2421 return stmt;
2422 }
2423
2424 /* Return the last statement of an otherwise empty block. Return NULL
2425 if the block is totally empty, or if it contains more than one
2426 statement. */
2427
2428 gimple
2429 last_and_only_stmt (basic_block bb)
2430 {
2431 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2432 gimple last, prev;
2433
2434 if (gsi_end_p (i))
2435 return NULL;
2436
2437 last = gsi_stmt (i);
2438 gsi_prev_nondebug (&i);
2439 if (gsi_end_p (i))
2440 return last;
2441
2442 /* Empty statements should no longer appear in the instruction stream.
2443 Everything that might have appeared before should be deleted by
2444 remove_useless_stmts, and the optimizers should just gsi_remove
2445 instead of smashing with build_empty_stmt.
2446
2447 Thus the only thing that should appear here in a block containing
2448 one executable statement is a label. */
2449 prev = gsi_stmt (i);
2450 if (gimple_code (prev) == GIMPLE_LABEL)
2451 return last;
2452 else
2453 return NULL;
2454 }
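
/* Example (illustrative): for a block containing only

     L1:
     x_1 = y_2;

   last_and_only_stmt returns the assignment; if the block holds a
   second executable (non-debug) statement, NULL is returned.  */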
2455
2456 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2457
2458 static void
2459 reinstall_phi_args (edge new_edge, edge old_edge)
2460 {
2461 edge_var_map_vector v;
2462 edge_var_map *vm;
2463 int i;
2464 gimple_stmt_iterator phis;
2465
2466 v = redirect_edge_var_map_vector (old_edge);
2467 if (!v)
2468 return;
2469
2470 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2471 VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
2472 i++, gsi_next (&phis))
2473 {
2474 gimple phi = gsi_stmt (phis);
2475 tree result = redirect_edge_var_map_result (vm);
2476 tree arg = redirect_edge_var_map_def (vm);
2477
2478 gcc_assert (result == gimple_phi_result (phi));
2479
2480 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2481 }
2482
2483 redirect_edge_var_map_clear (old_edge);
2484 }
2485
2486 /* Returns the basic block after which the new basic block created
2487 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2488 near its "logical" location. This is of most help to humans looking
2489 at debugging dumps. */
2490
2491 static basic_block
2492 split_edge_bb_loc (edge edge_in)
2493 {
2494 basic_block dest = edge_in->dest;
2495 basic_block dest_prev = dest->prev_bb;
2496
2497 if (dest_prev)
2498 {
2499 edge e = find_edge (dest_prev, dest);
2500 if (e && !(e->flags & EDGE_COMPLEX))
2501 return edge_in->src;
2502 }
2503 return dest_prev;
2504 }
2505
2506 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2507 Abort on abnormal edges. */
2508
2509 static basic_block
2510 gimple_split_edge (edge edge_in)
2511 {
2512 basic_block new_bb, after_bb, dest;
2513 edge new_edge, e;
2514
2515 /* Abnormal edges cannot be split. */
2516 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2517
2518 dest = edge_in->dest;
2519
2520 after_bb = split_edge_bb_loc (edge_in);
2521
2522 new_bb = create_empty_bb (after_bb);
2523 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2524 new_bb->count = edge_in->count;
2525 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2526 new_edge->probability = REG_BR_PROB_BASE;
2527 new_edge->count = edge_in->count;
2528
2529 e = redirect_edge_and_branch (edge_in, new_bb);
2530 gcc_assert (e == edge_in);
2531 reinstall_phi_args (new_edge, e);
2532
2533 return new_bb;
2534 }
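
/* Minimal usage sketch (assuming E is a critical but not abnormal
   edge):

     basic_block bb = gimple_split_edge (e);

   afterwards E points to BB, BB falls through to E's old
   destination, and the PHI arguments formerly queued on E live on
   the new fallthru edge via reinstall_phi_args.  */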
2535
2536 /* Callback for walk_tree: check that all elements with their address
2537 taken are properly noticed as such. DATA is currently unused (the
2538 walk_tree interface requires the parameter). */
2539
2540 static tree
2541 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2542 {
2543 tree t = *tp, x;
2544
2545 if (TYPE_P (t))
2546 *walk_subtrees = 0;
2547
2548 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2549 #define CHECK_OP(N, MSG) \
2550 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2551 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2552
2553 switch (TREE_CODE (t))
2554 {
2555 case SSA_NAME:
2556 if (SSA_NAME_IN_FREE_LIST (t))
2557 {
2558 error ("SSA name in freelist but still referenced");
2559 return *tp;
2560 }
2561 break;
2562
2563 case INDIRECT_REF:
2564 x = TREE_OPERAND (t, 0);
2565 if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
2566 {
2567 error ("Indirect reference's operand is not a register or a constant.");
2568 return x;
2569 }
2570 break;
2571
2572 case ASSERT_EXPR:
2573 x = fold (ASSERT_EXPR_COND (t));
2574 if (x == boolean_false_node)
2575 {
2576 error ("ASSERT_EXPR with an always-false condition");
2577 return *tp;
2578 }
2579 break;
2580
2581 case MODIFY_EXPR:
2582 error ("MODIFY_EXPR not expected while having tuples.");
2583 return *tp;
2584
2585 case ADDR_EXPR:
2586 {
2587 bool old_constant;
2588 bool old_side_effects;
2589 bool new_constant;
2590 bool new_side_effects;
2591
2592 gcc_assert (is_gimple_address (t));
2593
2594 old_constant = TREE_CONSTANT (t);
2595 old_side_effects = TREE_SIDE_EFFECTS (t);
2596
2597 recompute_tree_invariant_for_addr_expr (t);
2598 new_side_effects = TREE_SIDE_EFFECTS (t);
2599 new_constant = TREE_CONSTANT (t);
2600
2601 if (old_constant != new_constant)
2602 {
2603 error ("constant not recomputed when ADDR_EXPR changed");
2604 return t;
2605 }
2606 if (old_side_effects != new_side_effects)
2607 {
2608 error ("side effects not recomputed when ADDR_EXPR changed");
2609 return t;
2610 }
2611
2612 /* Skip any references (they will be checked when we recurse down the
2613 tree) and ensure that any variable used as a prefix is marked
2614 addressable. */
2615 for (x = TREE_OPERAND (t, 0);
2616 handled_component_p (x);
2617 x = TREE_OPERAND (x, 0))
2618 ;
2619
2620 if (!(TREE_CODE (x) == VAR_DECL
2621 || TREE_CODE (x) == PARM_DECL
2622 || TREE_CODE (x) == RESULT_DECL))
2623 return NULL;
2624 if (!TREE_ADDRESSABLE (x))
2625 {
2626 error ("address taken, but ADDRESSABLE bit not set");
2627 return x;
2628 }
2629 if (DECL_GIMPLE_REG_P (x))
2630 {
2631 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2632 return x;
2633 }
2634
2635 break;
2636 }
2637
2638 case COND_EXPR:
2639 x = COND_EXPR_COND (t);
2640 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2641 {
2642 error ("non-integral used in condition");
2643 return x;
2644 }
2645 if (!is_gimple_condexpr (x))
2646 {
2647 error ("invalid conditional operand");
2648 return x;
2649 }
2650 break;
2651
2652 case NON_LVALUE_EXPR:
2653 gcc_unreachable ();
2654
2655 CASE_CONVERT:
2656 case FIX_TRUNC_EXPR:
2657 case FLOAT_EXPR:
2658 case NEGATE_EXPR:
2659 case ABS_EXPR:
2660 case BIT_NOT_EXPR:
2661 case TRUTH_NOT_EXPR:
2662 CHECK_OP (0, "invalid operand to unary operator");
2663 break;
2664
2665 case REALPART_EXPR:
2666 case IMAGPART_EXPR:
2667 case COMPONENT_REF:
2668 case ARRAY_REF:
2669 case ARRAY_RANGE_REF:
2670 case BIT_FIELD_REF:
2671 case VIEW_CONVERT_EXPR:
2672 /* We have a nest of references. Verify that each of the operands
2673 that determines where to reference is either a constant or a variable,
2674 verify that the base is valid, and then mark that we've already checked
2675 the subtrees. */
2676 while (handled_component_p (t))
2677 {
2678 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2679 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2680 else if (TREE_CODE (t) == ARRAY_REF
2681 || TREE_CODE (t) == ARRAY_RANGE_REF)
2682 {
2683 CHECK_OP (1, "invalid array index");
2684 if (TREE_OPERAND (t, 2))
2685 CHECK_OP (2, "invalid array lower bound");
2686 if (TREE_OPERAND (t, 3))
2687 CHECK_OP (3, "invalid array stride");
2688 }
2689 else if (TREE_CODE (t) == BIT_FIELD_REF)
2690 {
2691 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2692 || !host_integerp (TREE_OPERAND (t, 2), 1))
2693 {
2694 error ("invalid position or size operand to BIT_FIELD_REF");
2695 return t;
2696 }
2697 else if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2698 && (TYPE_PRECISION (TREE_TYPE (t))
2699 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2700 {
2701 error ("integral result type precision does not match "
2702 "field size of BIT_FIELD_REF");
2703 return t;
2704 }
2705 if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2706 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2707 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2708 {
2709 error ("mode precision of non-integral result does not "
2710 "match field size of BIT_FIELD_REF");
2711 return t;
2712 }
2713 }
2714
2715 t = TREE_OPERAND (t, 0);
2716 }
2717
2718 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2719 {
2720 error ("invalid reference prefix");
2721 return t;
2722 }
2723 *walk_subtrees = 0;
2724 break;
2725 case PLUS_EXPR:
2726 case MINUS_EXPR:
2727 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer arithmetic
2728 should be done using POINTER_PLUS_EXPR. */
2729 if (POINTER_TYPE_P (TREE_TYPE (t)))
2730 {
2731 error ("invalid operand to plus/minus, type is a pointer");
2732 return t;
2733 }
2734 CHECK_OP (0, "invalid operand to binary operator");
2735 CHECK_OP (1, "invalid operand to binary operator");
2736 break;
2737
2738 case POINTER_PLUS_EXPR:
2739 /* Check to make sure the first operand is a pointer or reference type. */
2740 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2741 {
2742 error ("invalid operand to pointer plus, first operand is not a pointer");
2743 return t;
2744 }
2745 /* Check to make sure the second operand is an integer of type
2746 sizetype. */
2747 if (!useless_type_conversion_p (sizetype,
2748 TREE_TYPE (TREE_OPERAND (t, 1))))
2749 {
2750 error ("invalid operand to pointer plus, second operand is not an "
2751 "integer with type of sizetype.");
2752 return t;
2753 }
2754 /* FALLTHROUGH */
2755 case LT_EXPR:
2756 case LE_EXPR:
2757 case GT_EXPR:
2758 case GE_EXPR:
2759 case EQ_EXPR:
2760 case NE_EXPR:
2761 case UNORDERED_EXPR:
2762 case ORDERED_EXPR:
2763 case UNLT_EXPR:
2764 case UNLE_EXPR:
2765 case UNGT_EXPR:
2766 case UNGE_EXPR:
2767 case UNEQ_EXPR:
2768 case LTGT_EXPR:
2769 case MULT_EXPR:
2770 case TRUNC_DIV_EXPR:
2771 case CEIL_DIV_EXPR:
2772 case FLOOR_DIV_EXPR:
2773 case ROUND_DIV_EXPR:
2774 case TRUNC_MOD_EXPR:
2775 case CEIL_MOD_EXPR:
2776 case FLOOR_MOD_EXPR:
2777 case ROUND_MOD_EXPR:
2778 case RDIV_EXPR:
2779 case EXACT_DIV_EXPR:
2780 case MIN_EXPR:
2781 case MAX_EXPR:
2782 case LSHIFT_EXPR:
2783 case RSHIFT_EXPR:
2784 case LROTATE_EXPR:
2785 case RROTATE_EXPR:
2786 case BIT_IOR_EXPR:
2787 case BIT_XOR_EXPR:
2788 case BIT_AND_EXPR:
2789 CHECK_OP (0, "invalid operand to binary operator");
2790 CHECK_OP (1, "invalid operand to binary operator");
2791 break;
2792
2793 case CONSTRUCTOR:
2794 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2795 *walk_subtrees = 0;
2796 break;
2797
2798 default:
2799 break;
2800 }
2801 return NULL;
2802
2803 #undef CHECK_OP
2804 }
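
/* Usage sketch (this mirrors what verify_stmt does further below):
   walk every operand of a statement with this callback and treat a
   non-NULL return value as the offending subtree:

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     tree addr = walk_gimple_op (stmt, verify_expr, &wi);
     if (addr)
       debug_generic_expr (addr);
*/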
2805
2806
2807 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2808 Returns true if there is an error, otherwise false. */
2809
2810 static bool
2811 verify_types_in_gimple_min_lval (tree expr)
2812 {
2813 tree op;
2814
2815 if (is_gimple_id (expr))
2816 return false;
2817
2818 if (!INDIRECT_REF_P (expr)
2819 && TREE_CODE (expr) != TARGET_MEM_REF)
2820 {
2821 error ("invalid expression for min lvalue");
2822 return true;
2823 }
2824
2825 /* TARGET_MEM_REFs are strange beasts. */
2826 if (TREE_CODE (expr) == TARGET_MEM_REF)
2827 return false;
2828
2829 op = TREE_OPERAND (expr, 0);
2830 if (!is_gimple_val (op))
2831 {
2832 error ("invalid operand in indirect reference");
2833 debug_generic_stmt (op);
2834 return true;
2835 }
2836 if (!useless_type_conversion_p (TREE_TYPE (expr),
2837 TREE_TYPE (TREE_TYPE (op))))
2838 {
2839 error ("type mismatch in indirect reference");
2840 debug_generic_stmt (TREE_TYPE (expr));
2841 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2842 return true;
2843 }
2844
2845 return false;
2846 }
2847
2848 /* Verify if EXPR is a valid GIMPLE reference expression. If
2849 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2850 if there is an error, otherwise false. */
2851
2852 static bool
2853 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2854 {
2855 while (handled_component_p (expr))
2856 {
2857 tree op = TREE_OPERAND (expr, 0);
2858
2859 if (TREE_CODE (expr) == ARRAY_REF
2860 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2861 {
2862 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2863 || (TREE_OPERAND (expr, 2)
2864 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2865 || (TREE_OPERAND (expr, 3)
2866 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2867 {
2868 error ("invalid operands to array reference");
2869 debug_generic_stmt (expr);
2870 return true;
2871 }
2872 }
2873
2874 /* Verify if the reference array element types are compatible. */
2875 if (TREE_CODE (expr) == ARRAY_REF
2876 && !useless_type_conversion_p (TREE_TYPE (expr),
2877 TREE_TYPE (TREE_TYPE (op))))
2878 {
2879 error ("type mismatch in array reference");
2880 debug_generic_stmt (TREE_TYPE (expr));
2881 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2882 return true;
2883 }
2884 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2885 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2886 TREE_TYPE (TREE_TYPE (op))))
2887 {
2888 error ("type mismatch in array range reference");
2889 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2890 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2891 return true;
2892 }
2893
2894 if ((TREE_CODE (expr) == REALPART_EXPR
2895 || TREE_CODE (expr) == IMAGPART_EXPR)
2896 && !useless_type_conversion_p (TREE_TYPE (expr),
2897 TREE_TYPE (TREE_TYPE (op))))
2898 {
2899 error ("type mismatch in real/imagpart reference");
2900 debug_generic_stmt (TREE_TYPE (expr));
2901 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2902 return true;
2903 }
2904
2905 if (TREE_CODE (expr) == COMPONENT_REF
2906 && !useless_type_conversion_p (TREE_TYPE (expr),
2907 TREE_TYPE (TREE_OPERAND (expr, 1))))
2908 {
2909 error ("type mismatch in component reference");
2910 debug_generic_stmt (TREE_TYPE (expr));
2911 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2912 return true;
2913 }
2914
2915 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2916 {
2917 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2918 that their operand is not an SSA name or an invariant when
2919 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2920 bug). Otherwise there is nothing to verify; gross mismatches at
2921 most invoke undefined behavior. */
2922 if (require_lvalue
2923 && (TREE_CODE (op) == SSA_NAME
2924 || is_gimple_min_invariant (op)))
2925 {
2926 error ("Conversion of an SSA_NAME on the left hand side.");
2927 debug_generic_stmt (expr);
2928 return true;
2929 }
2930 else if (!handled_component_p (op))
2931 return false;
2932 }
2933
2934 expr = op;
2935 }
2936
2937 return ((require_lvalue || !is_gimple_min_invariant (expr))
2938 && verify_types_in_gimple_min_lval (expr));
2939 }
2940
2941 /* Returns true if TYPE_POINTER_TO (SRC_OBJ) is empty or contains a
2942 pointer type that is trivially convertible to DEST. */
2943
2944 static bool
2945 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2946 {
2947 tree src;
2948
2949 if (!TYPE_POINTER_TO (src_obj))
2950 return true;
2951
2952 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2953 if (useless_type_conversion_p (dest, src))
2954 return true;
2955
2956 return false;
2957 }
2958
2959 /* Return true if TYPE1 is a fixed-point type and if conversions to and
2960 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
2961
2962 static bool
2963 valid_fixed_convert_types_p (tree type1, tree type2)
2964 {
2965 return (FIXED_POINT_TYPE_P (type1)
2966 && (INTEGRAL_TYPE_P (type2)
2967 || SCALAR_FLOAT_TYPE_P (type2)
2968 || FIXED_POINT_TYPE_P (type2)));
2969 }
2970
2971 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
2972 is a problem, otherwise false. */
2973
2974 static bool
2975 verify_gimple_call (gimple stmt)
2976 {
2977 tree fn = gimple_call_fn (stmt);
2978 tree fntype;
2979 unsigned i;
2980
2981 if (TREE_CODE (fn) != OBJ_TYPE_REF
2982 && !is_gimple_val (fn))
2983 {
2984 error ("invalid function in gimple call");
2985 debug_generic_stmt (fn);
2986 return true;
2987 }
2988
2989 if (!POINTER_TYPE_P (TREE_TYPE (fn))
2990 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
2991 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE))
2992 {
2993 error ("non-function in gimple call");
2994 return true;
2995 }
2996
2997 if (gimple_call_lhs (stmt)
2998 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
2999 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3000 {
3001 error ("invalid LHS in gimple call");
3002 return true;
3003 }
3004
3005 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3006 {
3007 error ("LHS in noreturn call");
3008 return true;
3009 }
3010
3011 fntype = TREE_TYPE (TREE_TYPE (fn));
3012 if (gimple_call_lhs (stmt)
3013 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3014 TREE_TYPE (fntype))
3015 /* ??? At least C++ misses conversions at assignments from
3016 void * call results.
3017 ??? Java is completely off. Especially with functions
3018 returning java.lang.Object.
3019 For now simply allow arbitrary pointer type conversions. */
3020 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3021 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3022 {
3023 error ("invalid conversion in gimple call");
3024 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3025 debug_generic_stmt (TREE_TYPE (fntype));
3026 return true;
3027 }
3028
3029 if (gimple_call_chain (stmt)
3030 && !is_gimple_val (gimple_call_chain (stmt)))
3031 {
3032 error ("invalid static chain in gimple call");
3033 debug_generic_stmt (gimple_call_chain (stmt));
3034 return true;
3035 }
3036
3037 /* If there is a static chain argument, this should not be an indirect
3038 call, and the decl should have DECL_STATIC_CHAIN set. */
3039 if (gimple_call_chain (stmt))
3040 {
3041 if (TREE_CODE (fn) != ADDR_EXPR
3042 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
3043 {
3044 error ("static chain in indirect gimple call");
3045 return true;
3046 }
3047 fn = TREE_OPERAND (fn, 0);
3048
3049 if (!DECL_STATIC_CHAIN (fn))
3050 {
3051 error ("static chain with function that doesn't use one");
3052 return true;
3053 }
3054 }
3055
3056 /* ??? The C frontend passes unpromoted arguments in case it
3057 didn't see a function declaration before the call. So for now
3058 leave the call arguments mostly unverified. Once we gimplify
3059 unit-at-a-time we have a chance to fix this. */
3060
3061 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3062 {
3063 tree arg = gimple_call_arg (stmt, i);
3064 if (!is_gimple_operand (arg))
3065 {
3066 error ("invalid argument to gimple call");
3067 debug_generic_expr (arg);
3068 }
3069 }
3070
3071 return false;
3072 }
3073
3074 /* Verifies the gimple comparison with the result type TYPE and
3075 the operands OP0 and OP1. */
3076
3077 static bool
3078 verify_gimple_comparison (tree type, tree op0, tree op1)
3079 {
3080 tree op0_type = TREE_TYPE (op0);
3081 tree op1_type = TREE_TYPE (op1);
3082
3083 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3084 {
3085 error ("invalid operands in gimple comparison");
3086 return true;
3087 }
3088
3089 /* For comparisons we do not record the type in which the
3090 comparison is effectively carried out. Instead
3091 we require that either the first operand is trivially
3092 convertible into the second, or the other way around.
3093 The resulting type of a comparison may be any integral type.
3094 Because we special-case pointers to void we allow
3095 comparisons of pointers with the same mode as well. */
3096 if ((!useless_type_conversion_p (op0_type, op1_type)
3097 && !useless_type_conversion_p (op1_type, op0_type)
3098 && (!POINTER_TYPE_P (op0_type)
3099 || !POINTER_TYPE_P (op1_type)
3100 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3101 || !INTEGRAL_TYPE_P (type))
3102 {
3103 error ("type mismatch in comparison expression");
3104 debug_generic_expr (type);
3105 debug_generic_expr (op0_type);
3106 debug_generic_expr (op1_type);
3107 return true;
3108 }
3109
3110 return false;
3111 }
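
/* For instance (illustrative): comparing two pointers of the same
   mode is accepted even if the pointed-to types differ (think
   "void *" against "char *"), while comparing an integer with a
   floating-point value without an explicit conversion is reported
   as a type mismatch.  */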
3112
3113 /* Verify a gimple assignment statement STMT with a unary rhs.
3114 Returns true if anything is wrong. */
3115
3116 static bool
3117 verify_gimple_assign_unary (gimple stmt)
3118 {
3119 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3120 tree lhs = gimple_assign_lhs (stmt);
3121 tree lhs_type = TREE_TYPE (lhs);
3122 tree rhs1 = gimple_assign_rhs1 (stmt);
3123 tree rhs1_type = TREE_TYPE (rhs1);
3124
3125 if (!is_gimple_reg (lhs)
3126 && !(optimize == 0
3127 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3128 {
3129 error ("non-register as LHS of unary operation");
3130 return true;
3131 }
3132
3133 if (!is_gimple_val (rhs1))
3134 {
3135 error ("invalid operand in unary operation");
3136 return true;
3137 }
3138
3139 /* First handle conversions. */
3140 switch (rhs_code)
3141 {
3142 CASE_CONVERT:
3143 {
3144 /* Allow conversions between integral types and pointers only if
3145 there is no sign or zero extension involved.
3146 For targets where the precision of sizetype doesn't match that
3147 of pointers we need to allow arbitrary conversions from and
3148 to sizetype. */
3149 if ((POINTER_TYPE_P (lhs_type)
3150 && INTEGRAL_TYPE_P (rhs1_type)
3151 && (TYPE_PRECISION (lhs_type) >= TYPE_PRECISION (rhs1_type)
3152 || rhs1_type == sizetype))
3153 || (POINTER_TYPE_P (rhs1_type)
3154 && INTEGRAL_TYPE_P (lhs_type)
3155 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3156 || lhs_type == sizetype)))
3157 return false;
3158
3159 /* Allow conversion from integer to offset type and vice versa. */
3160 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3161 && TREE_CODE (rhs1_type) == INTEGER_TYPE)
3162 || (TREE_CODE (lhs_type) == INTEGER_TYPE
3163 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3164 return false;
3165
3166 /* Otherwise assert we are converting between types of the
3167 same kind. */
3168 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3169 {
3170 error ("invalid types in nop conversion");
3171 debug_generic_expr (lhs_type);
3172 debug_generic_expr (rhs1_type);
3173 return true;
3174 }
3175
3176 return false;
3177 }
3178
3179 case ADDR_SPACE_CONVERT_EXPR:
3180 {
3181 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3182 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3183 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3184 {
3185 error ("invalid types in address space conversion");
3186 debug_generic_expr (lhs_type);
3187 debug_generic_expr (rhs1_type);
3188 return true;
3189 }
3190
3191 return false;
3192 }
3193
3194 case FIXED_CONVERT_EXPR:
3195 {
3196 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3197 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3198 {
3199 error ("invalid types in fixed-point conversion");
3200 debug_generic_expr (lhs_type);
3201 debug_generic_expr (rhs1_type);
3202 return true;
3203 }
3204
3205 return false;
3206 }
3207
3208 case FLOAT_EXPR:
3209 {
3210 if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3211 {
3212 error ("invalid types in conversion to floating point");
3213 debug_generic_expr (lhs_type);
3214 debug_generic_expr (rhs1_type);
3215 return true;
3216 }
3217
3218 return false;
3219 }
3220
3221 case FIX_TRUNC_EXPR:
3222 {
3223 if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3224 {
3225 error ("invalid types in conversion to integer");
3226 debug_generic_expr (lhs_type);
3227 debug_generic_expr (rhs1_type);
3228 return true;
3229 }
3230
3231 return false;
3232 }
3233
3234 case VEC_UNPACK_HI_EXPR:
3235 case VEC_UNPACK_LO_EXPR:
3236 case REDUC_MAX_EXPR:
3237 case REDUC_MIN_EXPR:
3238 case REDUC_PLUS_EXPR:
3239 case VEC_UNPACK_FLOAT_HI_EXPR:
3240 case VEC_UNPACK_FLOAT_LO_EXPR:
3241 /* FIXME. */
3242 return false;
3243
3244 case TRUTH_NOT_EXPR:
3245 case NEGATE_EXPR:
3246 case ABS_EXPR:
3247 case BIT_NOT_EXPR:
3248 case PAREN_EXPR:
3249 case NON_LVALUE_EXPR:
3250 case CONJ_EXPR:
3251 break;
3252
3253 default:
3254 gcc_unreachable ();
3255 }
3256
3257 /* For the remaining codes assert there is no conversion involved. */
3258 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3259 {
3260 error ("non-trivial conversion in unary operation");
3261 debug_generic_expr (lhs_type);
3262 debug_generic_expr (rhs1_type);
3263 return true;
3264 }
3265
3266 return false;
3267 }
3268
3269 /* Verify a gimple assignment statement STMT with a binary rhs.
3270 Returns true if anything is wrong. */
3271
3272 static bool
3273 verify_gimple_assign_binary (gimple stmt)
3274 {
3275 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3276 tree lhs = gimple_assign_lhs (stmt);
3277 tree lhs_type = TREE_TYPE (lhs);
3278 tree rhs1 = gimple_assign_rhs1 (stmt);
3279 tree rhs1_type = TREE_TYPE (rhs1);
3280 tree rhs2 = gimple_assign_rhs2 (stmt);
3281 tree rhs2_type = TREE_TYPE (rhs2);
3282
3283 if (!is_gimple_reg (lhs)
3284 && !(optimize == 0
3285 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3286 {
3287 error ("non-register as LHS of binary operation");
3288 return true;
3289 }
3290
3291 if (!is_gimple_val (rhs1)
3292 || !is_gimple_val (rhs2))
3293 {
3294 error ("invalid operands in binary operation");
3295 return true;
3296 }
3297
3298 /* First handle operations that involve different types. */
3299 switch (rhs_code)
3300 {
3301 case COMPLEX_EXPR:
3302 {
3303 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3304 || !(INTEGRAL_TYPE_P (rhs1_type)
3305 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3306 || !(INTEGRAL_TYPE_P (rhs2_type)
3307 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3308 {
3309 error ("type mismatch in complex expression");
3310 debug_generic_expr (lhs_type);
3311 debug_generic_expr (rhs1_type);
3312 debug_generic_expr (rhs2_type);
3313 return true;
3314 }
3315
3316 return false;
3317 }
3318
3319 case LSHIFT_EXPR:
3320 case RSHIFT_EXPR:
3321 case LROTATE_EXPR:
3322 case RROTATE_EXPR:
3323 {
3324 /* Shifts and rotates are ok on integral types, fixed point
3325 types and integer vector types. */
3326 if ((!INTEGRAL_TYPE_P (rhs1_type)
3327 && !FIXED_POINT_TYPE_P (rhs1_type)
3328 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3329 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3330 || (!INTEGRAL_TYPE_P (rhs2_type)
3331 /* Vector shifts of vectors are also ok. */
3332 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3333 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3334 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3335 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3336 || !useless_type_conversion_p (lhs_type, rhs1_type))
3337 {
3338 error ("type mismatch in shift expression");
3339 debug_generic_expr (lhs_type);
3340 debug_generic_expr (rhs1_type);
3341 debug_generic_expr (rhs2_type);
3342 return true;
3343 }
3344
3345 return false;
3346 }
3347
3348 case VEC_LSHIFT_EXPR:
3349 case VEC_RSHIFT_EXPR:
3350 {
3351 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3352 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3353 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3354 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3355 || (!INTEGRAL_TYPE_P (rhs2_type)
3356 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3357 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3358 || !useless_type_conversion_p (lhs_type, rhs1_type))
3359 {
3360 error ("type mismatch in vector shift expression");
3361 debug_generic_expr (lhs_type);
3362 debug_generic_expr (rhs1_type);
3363 debug_generic_expr (rhs2_type);
3364 return true;
3365 }
3366 /* For shifting a vector of floating point components we
3367 only allow shifting by a constant multiple of the element size. */
3368 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
3369 && (TREE_CODE (rhs2) != INTEGER_CST
3370 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3371 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3372 {
3373 error ("non-element sized vector shift of floating point vector");
3374 return true;
3375 }
3376
3377 return false;
3378 }
3379
3380 case PLUS_EXPR:
3381 {
3382 /* We use regular PLUS_EXPR for vectors.
3383 ??? This just makes the checker happy and may not be what is
3384 intended. */
3385 if (TREE_CODE (lhs_type) == VECTOR_TYPE
3386 && POINTER_TYPE_P (TREE_TYPE (lhs_type)))
3387 {
3388 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3389 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3390 {
3391 error ("invalid non-vector operands to vector valued plus");
3392 return true;
3393 }
3394 lhs_type = TREE_TYPE (lhs_type);
3395 rhs1_type = TREE_TYPE (rhs1_type);
3396 rhs2_type = TREE_TYPE (rhs2_type);
3397 /* PLUS_EXPR is commutative, so we might end up canonicalizing
3398 the pointer into the second operand. */
3399 if (POINTER_TYPE_P (rhs2_type))
3400 {
3401 tree tem = rhs1_type;
3402 rhs1_type = rhs2_type;
3403 rhs2_type = tem;
3404 }
3405 goto do_pointer_plus_expr_check;
3406 }
3407 }
3408 /* Fallthru. */
3409 case MINUS_EXPR:
3410 {
3411 if (POINTER_TYPE_P (lhs_type)
3412 || POINTER_TYPE_P (rhs1_type)
3413 || POINTER_TYPE_P (rhs2_type))
3414 {
3415 error ("invalid (pointer) operands to plus/minus");
3416 return true;
3417 }
3418
3419 /* Continue with generic binary expression handling. */
3420 break;
3421 }
3422
3423 case POINTER_PLUS_EXPR:
3424 {
3425 do_pointer_plus_expr_check:
3426 if (!POINTER_TYPE_P (rhs1_type)
3427 || !useless_type_conversion_p (lhs_type, rhs1_type)
3428 || !useless_type_conversion_p (sizetype, rhs2_type))
3429 {
3430 error ("type mismatch in pointer plus expression");
3431 debug_generic_stmt (lhs_type);
3432 debug_generic_stmt (rhs1_type);
3433 debug_generic_stmt (rhs2_type);
3434 return true;
3435 }
3436
3437 return false;
3438 }
3439
3440 case TRUTH_ANDIF_EXPR:
3441 case TRUTH_ORIF_EXPR:
3442 gcc_unreachable ();
3443
3444 case TRUTH_AND_EXPR:
3445 case TRUTH_OR_EXPR:
3446 case TRUTH_XOR_EXPR:
3447 {
3448 /* We allow any kind of integral typed argument and result. */
3449 if (!INTEGRAL_TYPE_P (rhs1_type)
3450 || !INTEGRAL_TYPE_P (rhs2_type)
3451 || !INTEGRAL_TYPE_P (lhs_type))
3452 {
3453 error ("type mismatch in binary truth expression");
3454 debug_generic_expr (lhs_type);
3455 debug_generic_expr (rhs1_type);
3456 debug_generic_expr (rhs2_type);
3457 return true;
3458 }
3459
3460 return false;
3461 }
3462
3463 case LT_EXPR:
3464 case LE_EXPR:
3465 case GT_EXPR:
3466 case GE_EXPR:
3467 case EQ_EXPR:
3468 case NE_EXPR:
3469 case UNORDERED_EXPR:
3470 case ORDERED_EXPR:
3471 case UNLT_EXPR:
3472 case UNLE_EXPR:
3473 case UNGT_EXPR:
3474 case UNGE_EXPR:
3475 case UNEQ_EXPR:
3476 case LTGT_EXPR:
3477 /* Comparisons are also binary, but the result type is not
3478 connected to the operand types. */
3479 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3480
3481 case WIDEN_MULT_EXPR:
3482 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3483 return true;
3484 return ((2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type))
3485 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3486
3487 case WIDEN_SUM_EXPR:
3488 case VEC_WIDEN_MULT_HI_EXPR:
3489 case VEC_WIDEN_MULT_LO_EXPR:
3490 case VEC_PACK_TRUNC_EXPR:
3491 case VEC_PACK_SAT_EXPR:
3492 case VEC_PACK_FIX_TRUNC_EXPR:
3493 case VEC_EXTRACT_EVEN_EXPR:
3494 case VEC_EXTRACT_ODD_EXPR:
3495 case VEC_INTERLEAVE_HIGH_EXPR:
3496 case VEC_INTERLEAVE_LOW_EXPR:
3497 /* FIXME. */
3498 return false;
3499
3500 case MULT_EXPR:
3501 case TRUNC_DIV_EXPR:
3502 case CEIL_DIV_EXPR:
3503 case FLOOR_DIV_EXPR:
3504 case ROUND_DIV_EXPR:
3505 case TRUNC_MOD_EXPR:
3506 case CEIL_MOD_EXPR:
3507 case FLOOR_MOD_EXPR:
3508 case ROUND_MOD_EXPR:
3509 case RDIV_EXPR:
3510 case EXACT_DIV_EXPR:
3511 case MIN_EXPR:
3512 case MAX_EXPR:
3513 case BIT_IOR_EXPR:
3514 case BIT_XOR_EXPR:
3515 case BIT_AND_EXPR:
3516 /* Continue with generic binary expression handling. */
3517 break;
3518
3519 default:
3520 gcc_unreachable ();
3521 }
3522
3523 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3524 || !useless_type_conversion_p (lhs_type, rhs2_type))
3525 {
3526 error ("type mismatch in binary expression");
3527 debug_generic_stmt (lhs_type);
3528 debug_generic_stmt (rhs1_type);
3529 debug_generic_stmt (rhs2_type);
3530 return true;
3531 }
3532
3533 return false;
3534 }
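
/* Example (illustrative): for "char *q = p + 4" the gimplifier emits
   a POINTER_PLUS_EXPR "q_1 = p_2 + 4" whose second operand has
   sizetype; a plain PLUS_EXPR with pointer operands is rejected
   above.  */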
3535
3536 /* Verify a gimple assignment statement STMT with a ternary rhs.
3537 Returns true if anything is wrong. */
3538
3539 static bool
3540 verify_gimple_assign_ternary (gimple stmt)
3541 {
3542 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3543 tree lhs = gimple_assign_lhs (stmt);
3544 tree lhs_type = TREE_TYPE (lhs);
3545 tree rhs1 = gimple_assign_rhs1 (stmt);
3546 tree rhs1_type = TREE_TYPE (rhs1);
3547 tree rhs2 = gimple_assign_rhs2 (stmt);
3548 tree rhs2_type = TREE_TYPE (rhs2);
3549 tree rhs3 = gimple_assign_rhs3 (stmt);
3550 tree rhs3_type = TREE_TYPE (rhs3);
3551
3552 if (!is_gimple_reg (lhs)
3553 && !(optimize == 0
3554 && TREE_CODE (lhs_type) == COMPLEX_TYPE))
3555 {
3556 error ("non-register as LHS of ternary operation");
3557 return true;
3558 }
3559
3560 if (!is_gimple_val (rhs1)
3561 || !is_gimple_val (rhs2)
3562 || !is_gimple_val (rhs3))
3563 {
3564 error ("invalid operands in ternary operation");
3565 return true;
3566 }
3567
3568 /* First handle operations that involve different types. */
3569 switch (rhs_code)
3570 {
3571 case WIDEN_MULT_PLUS_EXPR:
3572 case WIDEN_MULT_MINUS_EXPR:
3573 if ((!INTEGRAL_TYPE_P (rhs1_type)
3574 && !FIXED_POINT_TYPE_P (rhs1_type))
3575 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3576 || !useless_type_conversion_p (lhs_type, rhs3_type)
3577 || 2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type)
3578 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3579 {
3580 error ("type mismatch in widening multiply-accumulate expression");
3581 debug_generic_expr (lhs_type);
3582 debug_generic_expr (rhs1_type);
3583 debug_generic_expr (rhs2_type);
3584 debug_generic_expr (rhs3_type);
3585 return true;
3586 }
3587 break;
3588
3589 default:
3590 gcc_unreachable ();
3591 }
3592 return false;
3593 }
3594
3595 /* Verify a gimple assignment statement STMT with a single rhs.
3596 Returns true if anything is wrong. */
3597
3598 static bool
3599 verify_gimple_assign_single (gimple stmt)
3600 {
3601 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3602 tree lhs = gimple_assign_lhs (stmt);
3603 tree lhs_type = TREE_TYPE (lhs);
3604 tree rhs1 = gimple_assign_rhs1 (stmt);
3605 tree rhs1_type = TREE_TYPE (rhs1);
3606 bool res = false;
3607
3608 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3609 {
3610 error ("non-trivial conversion at assignment");
3611 debug_generic_expr (lhs_type);
3612 debug_generic_expr (rhs1_type);
3613 return true;
3614 }
3615
3616 if (handled_component_p (lhs))
3617 res |= verify_types_in_gimple_reference (lhs, true);
3618
3619 /* Special codes we cannot handle via their class. */
3620 switch (rhs_code)
3621 {
3622 case ADDR_EXPR:
3623 {
3624 tree op = TREE_OPERAND (rhs1, 0);
3625 if (!is_gimple_addressable (op))
3626 {
3627 error ("invalid operand in unary expression");
3628 return true;
3629 }
3630
3631 if (!types_compatible_p (TREE_TYPE (op), TREE_TYPE (TREE_TYPE (rhs1)))
3632 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3633 TREE_TYPE (op)))
3634 {
3635 error ("type mismatch in address expression");
3636 debug_generic_stmt (TREE_TYPE (rhs1));
3637 debug_generic_stmt (TREE_TYPE (op));
3638 return true;
3639 }
3640
3641 return verify_types_in_gimple_reference (op, true);
3642 }
3643
3644 /* tcc_reference */
3645 case COMPONENT_REF:
3646 case BIT_FIELD_REF:
3647 case INDIRECT_REF:
3648 case ALIGN_INDIRECT_REF:
3649 case MISALIGNED_INDIRECT_REF:
3650 case ARRAY_REF:
3651 case ARRAY_RANGE_REF:
3652 case VIEW_CONVERT_EXPR:
3653 case REALPART_EXPR:
3654 case IMAGPART_EXPR:
3655 case TARGET_MEM_REF:
3656 if (!is_gimple_reg (lhs)
3657 && is_gimple_reg_type (TREE_TYPE (lhs)))
3658 {
3659 error ("invalid rhs for gimple memory store");
3660 debug_generic_stmt (lhs);
3661 debug_generic_stmt (rhs1);
3662 return true;
3663 }
3664 return res || verify_types_in_gimple_reference (rhs1, false);
3665
3666 /* tcc_constant */
3667 case SSA_NAME:
3668 case INTEGER_CST:
3669 case REAL_CST:
3670 case FIXED_CST:
3671 case COMPLEX_CST:
3672 case VECTOR_CST:
3673 case STRING_CST:
3674 return res;
3675
3676 /* tcc_declaration */
3677 case CONST_DECL:
3678 return res;
3679 case VAR_DECL:
3680 case PARM_DECL:
3681 if (!is_gimple_reg (lhs)
3682 && !is_gimple_reg (rhs1)
3683 && is_gimple_reg_type (TREE_TYPE (lhs)))
3684 {
3685 error ("invalid rhs for gimple memory store");
3686 debug_generic_stmt (lhs);
3687 debug_generic_stmt (rhs1);
3688 return true;
3689 }
3690 return res;
3691
3692 case COND_EXPR:
3693 if (!is_gimple_reg (lhs)
3694 || (!is_gimple_reg (TREE_OPERAND (rhs1, 0))
3695 && !COMPARISON_CLASS_P (TREE_OPERAND (rhs1, 0)))
3696 || (!is_gimple_reg (TREE_OPERAND (rhs1, 1))
3697 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 1)))
3698 || (!is_gimple_reg (TREE_OPERAND (rhs1, 2))
3699 && !is_gimple_min_invariant (TREE_OPERAND (rhs1, 2))))
3700 {
3701 error ("invalid COND_EXPR in gimple assignment");
3702 debug_generic_stmt (rhs1);
3703 return true;
3704 }
3705 return res;
3706
3707 case CONSTRUCTOR:
3708 case OBJ_TYPE_REF:
3709 case ASSERT_EXPR:
3710 case WITH_SIZE_EXPR:
3711 case POLYNOMIAL_CHREC:
3712 case DOT_PROD_EXPR:
3713 case VEC_COND_EXPR:
3714 case REALIGN_LOAD_EXPR:
3715 /* FIXME. */
3716 return res;
3717
3718 default:;
3719 }
3720
3721 return res;
3722 }
3723
3724 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
3725 is a problem, otherwise false. */
3726
3727 static bool
3728 verify_gimple_assign (gimple stmt)
3729 {
3730 switch (gimple_assign_rhs_class (stmt))
3731 {
3732 case GIMPLE_SINGLE_RHS:
3733 return verify_gimple_assign_single (stmt);
3734
3735 case GIMPLE_UNARY_RHS:
3736 return verify_gimple_assign_unary (stmt);
3737
3738 case GIMPLE_BINARY_RHS:
3739 return verify_gimple_assign_binary (stmt);
3740
3741 case GIMPLE_TERNARY_RHS:
3742 return verify_gimple_assign_ternary (stmt);
3743
3744 default:
3745 gcc_unreachable ();
3746 }
3747 }
3748
3749 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
3750 is a problem, otherwise false. */
3751
3752 static bool
3753 verify_gimple_return (gimple stmt)
3754 {
3755 tree op = gimple_return_retval (stmt);
3756 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
3757
3758 /* We cannot test for present return values as we do not fix up missing
3759 return values from the original source. */
3760 if (op == NULL)
3761 return false;
3762
3763 if (!is_gimple_val (op)
3764 && TREE_CODE (op) != RESULT_DECL)
3765 {
3766 error ("invalid operand in return statement");
3767 debug_generic_stmt (op);
3768 return true;
3769 }
3770
3771 if (!useless_type_conversion_p (restype, TREE_TYPE (op))
3772 /* ??? With C++ we can have the situation that the result
3773 decl is a reference type while the return type is an aggregate. */
3774 && !(TREE_CODE (op) == RESULT_DECL
3775 && TREE_CODE (TREE_TYPE (op)) == REFERENCE_TYPE
3776 && useless_type_conversion_p (restype, TREE_TYPE (TREE_TYPE (op)))))
3777 {
3778 error ("invalid conversion in return statement");
3779 debug_generic_stmt (restype);
3780 debug_generic_stmt (TREE_TYPE (op));
3781 return true;
3782 }
3783
3784 return false;
3785 }
3786
3787
3788 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
3789 is a problem, otherwise false. */
3790
3791 static bool
3792 verify_gimple_goto (gimple stmt)
3793 {
3794 tree dest = gimple_goto_dest (stmt);
3795
3796 /* ??? We have two canonical forms of direct goto destinations, a
3797 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
3798 if (TREE_CODE (dest) != LABEL_DECL
3799 && (!is_gimple_val (dest)
3800 || !POINTER_TYPE_P (TREE_TYPE (dest))))
3801 {
3802 error ("goto destination is neither a label nor a pointer");
3803 return true;
3804 }
3805
3806 return false;
3807 }
3808
3809 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
3810 is a problem, otherwise false. */
3811
3812 static bool
3813 verify_gimple_switch (gimple stmt)
3814 {
3815 if (!is_gimple_val (gimple_switch_index (stmt)))
3816 {
3817 error ("invalid operand to switch statement");
3818 debug_generic_stmt (gimple_switch_index (stmt));
3819 return true;
3820 }
3821
3822 return false;
3823 }
3824
3825
3826 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
3827 and false otherwise. */
3828
3829 static bool
3830 verify_gimple_phi (gimple stmt)
3831 {
3832 tree type = TREE_TYPE (gimple_phi_result (stmt));
3833 unsigned i;
3834
3835 if (TREE_CODE (gimple_phi_result (stmt)) != SSA_NAME)
3836 {
3837 error ("Invalid PHI result");
3838 return true;
3839 }
3840
3841 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3842 {
3843 tree arg = gimple_phi_arg_def (stmt, i);
3844 if ((is_gimple_reg (gimple_phi_result (stmt))
3845 && !is_gimple_val (arg))
3846 || (!is_gimple_reg (gimple_phi_result (stmt))
3847 && !is_gimple_addressable (arg)))
3848 {
3849 error ("Invalid PHI argument");
3850 debug_generic_stmt (arg);
3851 return true;
3852 }
3853 if (!useless_type_conversion_p (type, TREE_TYPE (arg)))
3854 {
3855 error ("Incompatible types in PHI argument %u", i);
3856 debug_generic_stmt (type);
3857 debug_generic_stmt (TREE_TYPE (arg));
3858 return true;
3859 }
3860 }
3861
3862 return false;
3863 }
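
/* Example (illustrative): a well-formed PHI merging an SSA name and
   a constant of compatible type looks like

     x_3 = PHI <x_1(3), 5(4)>

   whereas an argument whose type does not trivially convert to the
   result type is reported as incompatible.  */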
3864
3865
3866 /* Verify a gimple debug statement STMT.
3867 Returns true if anything is wrong. */
3868
3869 static bool
3870 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
3871 {
3872 /* There isn't much that could be wrong in a gimple debug stmt. A
3873 gimple debug bind stmt, for example, maps a tree (usually a
3874 VAR_DECL or a PARM_DECL, but possibly some scalarized component
3875 or member of an aggregate type) to another tree that can be an
3876 arbitrary expression. These stmts expand into debug
3877 insns, and are converted to debug notes by var-tracking.c. */
3878 return false;
3879 }
3880
3881
3882 /* Verify the GIMPLE statement STMT. Returns true if there is an
3883 error, otherwise false. */
3884
3885 static bool
3886 verify_types_in_gimple_stmt (gimple stmt)
3887 {
3888 switch (gimple_code (stmt))
3889 {
3890 case GIMPLE_ASSIGN:
3891 return verify_gimple_assign (stmt);
3892
3893 case GIMPLE_LABEL:
3894 return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
3895
3896 case GIMPLE_CALL:
3897 return verify_gimple_call (stmt);
3898
3899 case GIMPLE_COND:
3900 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
3901 {
3902 error ("invalid comparison code in gimple cond");
3903 return true;
3904 }
3905 if (!(!gimple_cond_true_label (stmt)
3906 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
3907 || !(!gimple_cond_false_label (stmt)
3908 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
3909 {
3910 error ("invalid labels in gimple cond");
3911 return true;
3912 }
3913
3914 return verify_gimple_comparison (boolean_type_node,
3915 gimple_cond_lhs (stmt),
3916 gimple_cond_rhs (stmt));
3917
3918 case GIMPLE_GOTO:
3919 return verify_gimple_goto (stmt);
3920
3921 case GIMPLE_SWITCH:
3922 return verify_gimple_switch (stmt);
3923
3924 case GIMPLE_RETURN:
3925 return verify_gimple_return (stmt);
3926
3927 case GIMPLE_ASM:
3928 return false;
3929
3930 case GIMPLE_PHI:
3931 return verify_gimple_phi (stmt);
3932
3933 /* Tuples that do not have tree operands. */
3934 case GIMPLE_NOP:
3935 case GIMPLE_PREDICT:
3936 case GIMPLE_RESX:
3937 case GIMPLE_EH_DISPATCH:
3938 case GIMPLE_EH_MUST_NOT_THROW:
3939 return false;
3940
3941 CASE_GIMPLE_OMP:
3942 /* OpenMP directives are validated by the FE and never operated
3943 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
3944 non-gimple expressions when the main index variable has had
3945 its address taken. This does not affect the loop itself
3946 because the header of a GIMPLE_OMP_FOR is merely used to determine
3947 how to set up the parallel iteration. */
3948 return false;
3949
3950 case GIMPLE_DEBUG:
3951 return verify_gimple_debug (stmt);
3952
3953 default:
3954 gcc_unreachable ();
3955 }
3956 }
3957
3958 /* Verify the GIMPLE statements inside the sequence STMTS. */
3959
3960 static bool
3961 verify_types_in_gimple_seq_2 (gimple_seq stmts)
3962 {
3963 gimple_stmt_iterator ittr;
3964 bool err = false;
3965
3966 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
3967 {
3968 gimple stmt = gsi_stmt (ittr);
3969
3970 switch (gimple_code (stmt))
3971 {
3972 case GIMPLE_BIND:
3973 err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
3974 break;
3975
3976 case GIMPLE_TRY:
3977 err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
3978 err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
3979 break;
3980
3981 case GIMPLE_EH_FILTER:
3982 err |= verify_types_in_gimple_seq_2 (gimple_eh_filter_failure (stmt));
3983 break;
3984
3985 case GIMPLE_CATCH:
3986 err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
3987 break;
3988
3989 default:
3990 {
3991 bool err2 = verify_types_in_gimple_stmt (stmt);
3992 if (err2)
3993 debug_gimple_stmt (stmt);
3994 err |= err2;
3995 }
3996 }
3997 }
3998
3999 return err;
4000 }
4001
4002
4003 /* Verify the GIMPLE statements inside the statement list STMTS. */
4004
4005 void
4006 verify_types_in_gimple_seq (gimple_seq stmts)
4007 {
4008 if (verify_types_in_gimple_seq_2 (stmts))
4009 internal_error ("verify_gimple failed");
4010 }
4011
4012
4013 /* Verify STMT, return true if STMT is not in GIMPLE form.
4014 TODO: Implement type checking. */
4015
4016 static bool
4017 verify_stmt (gimple_stmt_iterator *gsi)
4018 {
4019 tree addr;
4020 struct walk_stmt_info wi;
4021 bool last_in_block = gsi_one_before_end_p (*gsi);
4022 gimple stmt = gsi_stmt (*gsi);
4023 int lp_nr;
4024
4025 if (is_gimple_omp (stmt))
4026 {
4027 /* OpenMP directives are validated by the FE and never operated
4028 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4029 non-gimple expressions when the main index variable has had
4030 its address taken. This does not affect the loop itself
4031 because the header of a GIMPLE_OMP_FOR is merely used to determine
4032 how to set up the parallel iteration. */
4033 return false;
4034 }
4035
4036 /* FIXME. The C frontend passes unpromoted arguments in case it
4037 didn't see a function declaration before the call. */
4038 if (is_gimple_call (stmt))
4039 {
4040 tree decl;
4041
4042 if (!is_gimple_call_addr (gimple_call_fn (stmt)))
4043 {
4044 error ("invalid function in call statement");
4045 return true;
4046 }
4047
4048 decl = gimple_call_fndecl (stmt);
4049 if (decl
4050 && TREE_CODE (decl) == FUNCTION_DECL
4051 && DECL_LOOPING_CONST_OR_PURE_P (decl)
4052 && (!DECL_PURE_P (decl))
4053 && (!TREE_READONLY (decl)))
4054 {
4055 error ("invalid pure const state for function");
4056 return true;
4057 }
4058 }
4059
4060 if (is_gimple_debug (stmt))
4061 return false;
4062
4063 memset (&wi, 0, sizeof (wi));
4064 addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
4065 if (addr)
4066 {
4067 debug_generic_expr (addr);
4068 inform (gimple_location (gsi_stmt (*gsi)), "in statement");
4069 debug_gimple_stmt (stmt);
4070 return true;
4071 }
4072
4073 /* If the statement is marked as part of an EH region, then it is
4074 expected that the statement could throw. Verify that when we
4075 have optimizations that simplify statements such that we prove
4076 they cannot throw, we also update other data structures to
4077 match. */
4078 lp_nr = lookup_stmt_eh_lp (stmt);
4079 if (lp_nr != 0)
4080 {
4081 if (!stmt_could_throw_p (stmt))
4082 {
4083 error ("statement marked for throw, but doesn%'t");
4084 goto fail;
4085 }
4086 else if (lp_nr > 0 && !last_in_block && stmt_can_throw_internal (stmt))
4087 {
4088 error ("statement marked for throw in middle of block");
4089 goto fail;
4090 }
4091 }
4092
4093 return false;
4094
4095 fail:
4096 debug_gimple_stmt (stmt);
4097 return true;
4098 }
4099
4100
4101 /* Return true when T can be shared. */
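/* For instance, per the checks below: constants, types, decls and SSA
   names may appear in any number of statements, whereas an ARRAY_REF
   whose index is not a gimple invariant must remain unshared.  */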
4102
4103 bool
4104 tree_node_can_be_shared (tree t)
4105 {
4106 if (IS_TYPE_OR_DECL_P (t)
4107 || is_gimple_min_invariant (t)
4108 || TREE_CODE (t) == SSA_NAME
4109 || t == error_mark_node
4110 || TREE_CODE (t) == IDENTIFIER_NODE)
4111 return true;
4112
4113 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4114 return true;
4115
4116 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4117 && is_gimple_min_invariant (TREE_OPERAND (t, 1)))
4118 || TREE_CODE (t) == COMPONENT_REF
4119 || TREE_CODE (t) == REALPART_EXPR
4120 || TREE_CODE (t) == IMAGPART_EXPR)
4121 t = TREE_OPERAND (t, 0);
4122
4123 if (DECL_P (t))
4124 return true;
4125
4126 return false;
4127 }
4128
4129
4130 /* Called via walk_gimple_stmt. Verify tree sharing. */
4131
4132 static tree
4133 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4134 {
4135 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4136 struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
4137
4138 if (tree_node_can_be_shared (*tp))
4139 {
4140 *walk_subtrees = false;
4141 return NULL;
4142 }
4143
4144 if (pointer_set_insert (visited, *tp))
4145 return *tp;
4146
4147 return NULL;
4148 }
4149
4150
4151 static bool eh_error_found;
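/* Callback for htab_traverse; DATA is the pointer set of statements
   visited while walking the IL.  Complain about statements that
   linger in the EH throw table but were never visited, i.e. that no
   longer exist.  */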
4152 static int
4153 verify_eh_throw_stmt_node (void **slot, void *data)
4154 {
4155 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4156 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4157
4158 if (!pointer_set_contains (visited, node->stmt))
4159 {
4160 error ("Dead STMT in EH table");
4161 debug_gimple_stmt (node->stmt);
4162 eh_error_found = true;
4163 }
4164 return 1;
4165 }
4166
4167
4168 /* Verify the GIMPLE statements in every basic block. */
4169
4170 DEBUG_FUNCTION void
4171 verify_stmts (void)
4172 {
4173 basic_block bb;
4174 gimple_stmt_iterator gsi;
4175 bool err = false;
4176 struct pointer_set_t *visited, *visited_stmts;
4177 tree addr;
4178 struct walk_stmt_info wi;
4179
4180 timevar_push (TV_TREE_STMT_VERIFY);
4181 visited = pointer_set_create ();
4182 visited_stmts = pointer_set_create ();
4183
4184 memset (&wi, 0, sizeof (wi));
4185 wi.info = (void *) visited;
4186
4187 FOR_EACH_BB (bb)
4188 {
4189 gimple phi;
4190 size_t i;
4191
4192 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4193 {
4194 phi = gsi_stmt (gsi);
4195 pointer_set_insert (visited_stmts, phi);
4196 if (gimple_bb (phi) != bb)
4197 {
4198 error ("gimple_bb (phi) is set to a wrong basic block");
4199 err |= true;
4200 }
4201
4202 for (i = 0; i < gimple_phi_num_args (phi); i++)
4203 {
4204 tree t = gimple_phi_arg_def (phi, i);
4205 tree addr;
4206
4207 if (!t)
4208 {
4209 error ("missing PHI def");
4210 debug_gimple_stmt (phi);
4211 err |= true;
4212 continue;
4213 }
4214 /* Addressable variables do have SSA_NAMEs but they
4215 are not considered gimple values. */
4216 else if (TREE_CODE (t) != SSA_NAME
4217 && TREE_CODE (t) != FUNCTION_DECL
4218 && !is_gimple_min_invariant (t))
4219 {
4220 error ("PHI argument is not a GIMPLE value");
4221 debug_gimple_stmt (phi);
4222 debug_generic_expr (t);
4223 err |= true;
4224 }
4225
4226 addr = walk_tree (&t, verify_node_sharing, visited, NULL);
4227 if (addr)
4228 {
4229 error ("incorrect sharing of tree nodes");
4230 debug_gimple_stmt (phi);
4231 debug_generic_expr (addr);
4232 err |= true;
4233 }
4234 }
4235
4236 #ifdef ENABLE_TYPES_CHECKING
4237 if (verify_gimple_phi (phi))
4238 {
4239 debug_gimple_stmt (phi);
4240 err |= true;
4241 }
4242 #endif
4243 }
4244
4245 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
4246 {
4247 gimple stmt = gsi_stmt (gsi);
4248
4249 if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
4250 || gimple_code (stmt) == GIMPLE_BIND)
4251 {
4252 error ("invalid GIMPLE statement");
4253 debug_gimple_stmt (stmt);
4254 err |= true;
4255 }
4256
4257 pointer_set_insert (visited_stmts, stmt);
4258
4259 if (gimple_bb (stmt) != bb)
4260 {
4261 error ("gimple_bb (stmt) is set to a wrong basic block");
4262 debug_gimple_stmt (stmt);
4263 err |= true;
4264 }
4265
4266 if (gimple_code (stmt) == GIMPLE_LABEL)
4267 {
4268 tree decl = gimple_label_label (stmt);
4269 int uid = LABEL_DECL_UID (decl);
4270
4271 if (uid == -1
4272 || VEC_index (basic_block, label_to_block_map, uid) != bb)
4273 {
4274 error ("incorrect entry in label_to_block_map");
4275 err |= true;
4276 }
4277
4278 uid = EH_LANDING_PAD_NR (decl);
4279 if (uid)
4280 {
4281 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4282 if (decl != lp->post_landing_pad)
4283 {
4284 error ("incorrect setting of landing pad number");
4285 err |= true;
4286 }
4287 }
4288 }
4289
4290 err |= verify_stmt (&gsi);
4291
4292 #ifdef ENABLE_TYPES_CHECKING
4293 if (verify_types_in_gimple_stmt (gsi_stmt (gsi)))
4294 {
4295 debug_gimple_stmt (stmt);
4296 err |= true;
4297 }
4298 #endif
4299 addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
4300 if (addr)
4301 {
4302 error ("incorrect sharing of tree nodes");
4303 debug_gimple_stmt (stmt);
4304 debug_generic_expr (addr);
4305 err |= true;
4306 }
4307 gsi_next (&gsi);
4308 }
4309 }
4310
4311 eh_error_found = false;
4312 if (get_eh_throw_stmt_table (cfun))
4313 htab_traverse (get_eh_throw_stmt_table (cfun),
4314 verify_eh_throw_stmt_node,
4315 visited_stmts);
4316
4317 if (err | eh_error_found)
4318 internal_error ("verify_stmts failed");
4319
4320 pointer_set_destroy (visited);
4321 pointer_set_destroy (visited_stmts);
4322 verify_histograms ();
4323 timevar_pop (TV_TREE_STMT_VERIFY);
4324 }
4325
4326
4327 /* Verifies that the flow information is OK. */
4328
4329 static int
4330 gimple_verify_flow_info (void)
4331 {
4332 int err = 0;
4333 basic_block bb;
4334 gimple_stmt_iterator gsi;
4335 gimple stmt;
4336 edge e;
4337 edge_iterator ei;
4338
4339 if (ENTRY_BLOCK_PTR->il.gimple)
4340 {
4341 error ("ENTRY_BLOCK has IL associated with it");
4342 err = 1;
4343 }
4344
4345 if (EXIT_BLOCK_PTR->il.gimple)
4346 {
4347 error ("EXIT_BLOCK has IL associated with it");
4348 err = 1;
4349 }
4350
4351 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4352 if (e->flags & EDGE_FALLTHRU)
4353 {
4354 error ("fallthru to exit from bb %d", e->src->index);
4355 err = 1;
4356 }
4357
4358 FOR_EACH_BB (bb)
4359 {
4360 bool found_ctrl_stmt = false;
4361
4362 stmt = NULL;
4363
4364 /* Skip labels on the start of basic block. */
4365 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4366 {
4367 tree label;
4368 gimple prev_stmt = stmt;
4369
4370 stmt = gsi_stmt (gsi);
4371
4372 if (gimple_code (stmt) != GIMPLE_LABEL)
4373 break;
4374
4375 label = gimple_label_label (stmt);
4376 if (prev_stmt && DECL_NONLOCAL (label))
4377 {
4378 error ("nonlocal label ");
4379 print_generic_expr (stderr, label, 0);
4380 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4381 bb->index);
4382 err = 1;
4383 }
4384
4385 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4386 {
4387 error ("EH landing pad label ");
4388 print_generic_expr (stderr, label, 0);
4389 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4390 bb->index);
4391 err = 1;
4392 }
4393
4394 if (label_to_block (label) != bb)
4395 {
4396 error ("label ");
4397 print_generic_expr (stderr, label, 0);
4398 fprintf (stderr, " to block does not match in bb %d",
4399 bb->index);
4400 err = 1;
4401 }
4402
4403 if (decl_function_context (label) != current_function_decl)
4404 {
4405 error ("label ");
4406 print_generic_expr (stderr, label, 0);
4407 fprintf (stderr, " has incorrect context in bb %d",
4408 bb->index);
4409 err = 1;
4410 }
4411 }
4412
4413 /* Verify that the body of basic block BB is free of control flow. */
4414 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4415 {
4416 gimple stmt = gsi_stmt (gsi);
4417
4418 if (found_ctrl_stmt)
4419 {
4420 error ("control flow in the middle of basic block %d",
4421 bb->index);
4422 err = 1;
4423 }
4424
4425 if (stmt_ends_bb_p (stmt))
4426 found_ctrl_stmt = true;
4427
4428 if (gimple_code (stmt) == GIMPLE_LABEL)
4429 {
4430 error ("label ");
4431 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4432 fprintf (stderr, " in the middle of basic block %d", bb->index);
4433 err = 1;
4434 }
4435 }
4436
4437 gsi = gsi_last_bb (bb);
4438 if (gsi_end_p (gsi))
4439 continue;
4440
4441 stmt = gsi_stmt (gsi);
4442
4443 if (gimple_code (stmt) == GIMPLE_LABEL)
4444 continue;
4445
4446 err |= verify_eh_edges (stmt);
4447
4448 if (is_ctrl_stmt (stmt))
4449 {
4450 FOR_EACH_EDGE (e, ei, bb->succs)
4451 if (e->flags & EDGE_FALLTHRU)
4452 {
4453 error ("fallthru edge after a control statement in bb %d",
4454 bb->index);
4455 err = 1;
4456 }
4457 }
4458
4459 if (gimple_code (stmt) != GIMPLE_COND)
4460 {
4461 /* Verify that there are no edges with EDGE_TRUE_VALUE or
4462 EDGE_FALSE_VALUE set after anything other than a GIMPLE_COND. */
4463 FOR_EACH_EDGE (e, ei, bb->succs)
4464 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4465 {
4466 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4467 bb->index);
4468 err = 1;
4469 }
4470 }
4471
4472 switch (gimple_code (stmt))
4473 {
4474 case GIMPLE_COND:
4475 {
4476 edge true_edge;
4477 edge false_edge;
4478
4479 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4480
4481 if (!true_edge
4482 || !false_edge
4483 || !(true_edge->flags & EDGE_TRUE_VALUE)
4484 || !(false_edge->flags & EDGE_FALSE_VALUE)
4485 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4486 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4487 || EDGE_COUNT (bb->succs) >= 3)
4488 {
4489 error ("wrong outgoing edge flags at end of bb %d",
4490 bb->index);
4491 err = 1;
4492 }
4493 }
4494 break;
4495
4496 case GIMPLE_GOTO:
4497 if (simple_goto_p (stmt))
4498 {
4499 error ("explicit goto at end of bb %d", bb->index);
4500 err = 1;
4501 }
4502 else
4503 {
4504 /* FIXME. We should double check that the labels in the
4505 destination blocks have their address taken. */
4506 FOR_EACH_EDGE (e, ei, bb->succs)
4507 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4508 | EDGE_FALSE_VALUE))
4509 || !(e->flags & EDGE_ABNORMAL))
4510 {
4511 error ("wrong outgoing edge flags at end of bb %d",
4512 bb->index);
4513 err = 1;
4514 }
4515 }
4516 break;
4517
4518 case GIMPLE_CALL:
4519 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4520 break;
4521 /* ... fallthru ... */
4522 case GIMPLE_RETURN:
4523 if (!single_succ_p (bb)
4524 || (single_succ_edge (bb)->flags
4525 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4526 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4527 {
4528 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4529 err = 1;
4530 }
4531 if (single_succ (bb) != EXIT_BLOCK_PTR)
4532 {
4533 error ("return edge does not point to exit in bb %d",
4534 bb->index);
4535 err = 1;
4536 }
4537 break;
4538
4539 case GIMPLE_SWITCH:
4540 {
4541 tree prev;
4542 edge e;
4543 size_t i, n;
4544
4545 n = gimple_switch_num_labels (stmt);
4546
4547 /* Mark all the destination basic blocks. */
4548 for (i = 0; i < n; ++i)
4549 {
4550 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4551 basic_block label_bb = label_to_block (lab);
4552 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
4553 label_bb->aux = (void *)1;
4554 }
4555
4556 /* Verify that the case labels are sorted. */
4557 prev = gimple_switch_label (stmt, 0);
4558 for (i = 1; i < n; ++i)
4559 {
4560 tree c = gimple_switch_label (stmt, i);
4561 if (!CASE_LOW (c))
4562 {
4563 error ("found default case not at the start of "
4564 "case vector");
4565 err = 1;
4566 continue;
4567 }
4568 if (CASE_LOW (prev)
4569 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
4570 {
4571 error ("case labels not sorted: ");
4572 print_generic_expr (stderr, prev, 0);
4573 fprintf (stderr," is greater than ");
4574 print_generic_expr (stderr, c, 0);
4575 fprintf (stderr," but comes before it.\n");
4576 err = 1;
4577 }
4578 prev = c;
4579 }
4580 /* VRP will remove the default case if it can prove it will
4581 never be executed. So do not verify there always exists
4582 a default case here. */
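/* A hypothetical example: in
     switch (x & 1) { case 0: ...; case 1: ...; default: ...; }
   VRP can prove the default case unreachable and remove it.  */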
4583
4584 FOR_EACH_EDGE (e, ei, bb->succs)
4585 {
4586 if (!e->dest->aux)
4587 {
4588 error ("extra outgoing edge %d->%d",
4589 bb->index, e->dest->index);
4590 err = 1;
4591 }
4592
4593 e->dest->aux = (void *)2;
4594 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
4595 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4596 {
4597 error ("wrong outgoing edge flags at end of bb %d",
4598 bb->index);
4599 err = 1;
4600 }
4601 }
4602
4603 /* Check that we have all of them. */
4604 for (i = 0; i < n; ++i)
4605 {
4606 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
4607 basic_block label_bb = label_to_block (lab);
4608
4609 if (label_bb->aux != (void *)2)
4610 {
4611 error ("missing edge %i->%i", bb->index, label_bb->index);
4612 err = 1;
4613 }
4614 }
4615
4616 FOR_EACH_EDGE (e, ei, bb->succs)
4617 e->dest->aux = (void *)0;
4618 }
4619 break;
4620
4621 case GIMPLE_EH_DISPATCH:
4622 err |= verify_eh_dispatch_edge (stmt);
4623 break;
4624
4625 default:
4626 break;
4627 }
4628 }
4629
4630 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
4631 verify_dominators (CDI_DOMINATORS);
4632
4633 return err;
4634 }
4635
4636
4637 /* Updates phi nodes after creating a forwarder block joined
4638 by edge FALLTHRU. */
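/* A sketch with made-up SSA names: if BB originally had
   "x_3 = PHI <x_1(A), x_2(B)>" and the forwarder DUMMY now carries
   the edges from A and B, the old PHI stays in DUMMY with a fresh
   result (say x_4) and BB gets the single-argument PHI
   "x_3 = PHI <x_4(DUMMY)>"; arguments for the remaining edges into BB
   are then flushed from their pending lists.  */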
4639
4640 static void
4641 gimple_make_forwarder_block (edge fallthru)
4642 {
4643 edge e;
4644 edge_iterator ei;
4645 basic_block dummy, bb;
4646 tree var;
4647 gimple_stmt_iterator gsi;
4648
4649 dummy = fallthru->src;
4650 bb = fallthru->dest;
4651
4652 if (single_pred_p (bb))
4653 return;
4654
4655 /* If we redirected a branch we must create new PHI nodes at the
4656 start of BB. */
4657 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
4658 {
4659 gimple phi, new_phi;
4660
4661 phi = gsi_stmt (gsi);
4662 var = gimple_phi_result (phi);
4663 new_phi = create_phi_node (var, bb);
4664 SSA_NAME_DEF_STMT (var) = new_phi;
4665 gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
4666 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
4667 UNKNOWN_LOCATION);
4668 }
4669
4670 /* Add the arguments we have stored on edges. */
4671 FOR_EACH_EDGE (e, ei, bb->preds)
4672 {
4673 if (e == fallthru)
4674 continue;
4675
4676 flush_pending_stmts (e);
4677 }
4678 }
4679
4680
4681 /* Return a non-special label at the head of basic block BB.
4682 Create one if it doesn't exist. */
4683
4684 tree
4685 gimple_block_label (basic_block bb)
4686 {
4687 gimple_stmt_iterator i, s = gsi_start_bb (bb);
4688 bool first = true;
4689 tree label;
4690 gimple stmt;
4691
4692 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
4693 {
4694 stmt = gsi_stmt (i);
4695 if (gimple_code (stmt) != GIMPLE_LABEL)
4696 break;
4697 label = gimple_label_label (stmt);
4698 if (!DECL_NONLOCAL (label))
4699 {
4700 if (!first)
4701 gsi_move_before (&i, &s);
4702 return label;
4703 }
4704 }
4705
4706 label = create_artificial_label (UNKNOWN_LOCATION);
4707 stmt = gimple_build_label (label);
4708 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
4709 return label;
4710 }
4711
4712
4713 /* Attempt to perform edge redirection by replacing a possibly complex
4714 jump instruction by a goto or by removing the jump completely.
4715 This can apply only if all edges now point to the same block. The
4716 parameters and return values are equivalent to
4717 redirect_edge_and_branch. */
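/* For instance (made-up GIMPLE), redirecting one arm of
     "if (a_1 > 0) goto <L1>; else goto <L2>;"
   so that both arms would reach the same block lets us delete the
   GIMPLE_COND and turn the surviving edge into a plain fallthru.  */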
4718
4719 static edge
4720 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
4721 {
4722 basic_block src = e->src;
4723 gimple_stmt_iterator i;
4724 gimple stmt;
4725
4726 /* We can replace or remove a complex jump only when we have exactly
4727 two edges. */
4728 if (EDGE_COUNT (src->succs) != 2
4729 /* Verify that all targets will be TARGET. Specifically, the
4730 edge that is not E must also go to TARGET. */
4731 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
4732 return NULL;
4733
4734 i = gsi_last_bb (src);
4735 if (gsi_end_p (i))
4736 return NULL;
4737
4738 stmt = gsi_stmt (i);
4739
4740 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
4741 {
4742 gsi_remove (&i, true);
4743 e = ssa_redirect_edge (e, target);
4744 e->flags = EDGE_FALLTHRU;
4745 return e;
4746 }
4747
4748 return NULL;
4749 }
4750
4751
4752 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
4753 edge representing the redirected branch. */
4754
4755 static edge
4756 gimple_redirect_edge_and_branch (edge e, basic_block dest)
4757 {
4758 basic_block bb = e->src;
4759 gimple_stmt_iterator gsi;
4760 edge ret;
4761 gimple stmt;
4762
4763 if (e->flags & EDGE_ABNORMAL)
4764 return NULL;
4765
4766 if (e->dest == dest)
4767 return NULL;
4768
4769 if (e->flags & EDGE_EH)
4770 return redirect_eh_edge (e, dest);
4771
4772 if (e->src != ENTRY_BLOCK_PTR)
4773 {
4774 ret = gimple_try_redirect_by_replacing_jump (e, dest);
4775 if (ret)
4776 return ret;
4777 }
4778
4779 gsi = gsi_last_bb (bb);
4780 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
4781
4782 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
4783 {
4784 case GIMPLE_COND:
4785 /* For COND_EXPR, we only need to redirect the edge. */
4786 break;
4787
4788 case GIMPLE_GOTO:
4789 /* No non-abnormal edges should lead from a non-simple goto, and
4790 simple ones should be represented implicitly. */
4791 gcc_unreachable ();
4792
4793 case GIMPLE_SWITCH:
4794 {
4795 tree label = gimple_block_label (dest);
4796 tree cases = get_cases_for_edge (e, stmt);
4797
4798 /* If we have a list of cases associated with E, then use it
4799 as it's a lot faster than walking the entire case vector. */
4800 if (cases)
4801 {
4802 edge e2 = find_edge (e->src, dest);
4803 tree last, first;
4804
4805 first = cases;
4806 while (cases)
4807 {
4808 last = cases;
4809 CASE_LABEL (cases) = label;
4810 cases = TREE_CHAIN (cases);
4811 }
4812
4813 /* If there was already an edge in the CFG, then we need
4814 to move all the cases associated with E to E2. */
4815 if (e2)
4816 {
4817 tree cases2 = get_cases_for_edge (e2, stmt);
4818
4819 TREE_CHAIN (last) = TREE_CHAIN (cases2);
4820 TREE_CHAIN (cases2) = first;
4821 }
4822 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
4823 }
4824 else
4825 {
4826 size_t i, n = gimple_switch_num_labels (stmt);
4827
4828 for (i = 0; i < n; i++)
4829 {
4830 tree elt = gimple_switch_label (stmt, i);
4831 if (label_to_block (CASE_LABEL (elt)) == e->dest)
4832 CASE_LABEL (elt) = label;
4833 }
4834 }
4835 }
4836 break;
4837
4838 case GIMPLE_ASM:
4839 {
4840 int i, n = gimple_asm_nlabels (stmt);
4841 tree label = NULL;
4842
4843 for (i = 0; i < n; ++i)
4844 {
4845 tree cons = gimple_asm_label_op (stmt, i);
4846 if (label_to_block (TREE_VALUE (cons)) == e->dest)
4847 {
4848 if (!label)
4849 label = gimple_block_label (dest);
4850 TREE_VALUE (cons) = label;
4851 }
4852 }
4853
4854 /* If we didn't find any label matching the former edge in the
4855 asm labels, we must be redirecting the fallthrough
4856 edge. */
4857 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
4858 }
4859 break;
4860
4861 case GIMPLE_RETURN:
4862 gsi_remove (&gsi, true);
4863 e->flags |= EDGE_FALLTHRU;
4864 break;
4865
4866 case GIMPLE_OMP_RETURN:
4867 case GIMPLE_OMP_CONTINUE:
4868 case GIMPLE_OMP_SECTIONS_SWITCH:
4869 case GIMPLE_OMP_FOR:
4870 /* The edges from OMP constructs can be simply redirected. */
4871 break;
4872
4873 case GIMPLE_EH_DISPATCH:
4874 if (!(e->flags & EDGE_FALLTHRU))
4875 redirect_eh_dispatch_edge (stmt, e, dest);
4876 break;
4877
4878 default:
4879 /* Otherwise it must be a fallthru edge, and we don't need to
4880 do anything besides redirecting it. */
4881 gcc_assert (e->flags & EDGE_FALLTHRU);
4882 break;
4883 }
4884
4885 /* Update/insert PHI nodes as necessary. */
4886
4887 /* Now update the edges in the CFG. */
4888 e = ssa_redirect_edge (e, dest);
4889
4890 return e;
4891 }
4892
4893 /* Returns true if it is possible to remove edge E by redirecting
4894 it to the destination of the other edge from E->src. */
4895
4896 static bool
4897 gimple_can_remove_branch_p (const_edge e)
4898 {
4899 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
4900 return false;
4901
4902 return true;
4903 }
4904
4905 /* Simple wrapper, as we can always redirect fallthru edges. */
4906
4907 static basic_block
4908 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
4909 {
4910 e = gimple_redirect_edge_and_branch (e, dest);
4911 gcc_assert (e);
4912
4913 return NULL;
4914 }
4915
4916
4917 /* Splits basic block BB after statement STMT (but at least after the
4918 labels). If STMT is NULL, BB is split just after the labels. */
4919
4920 static basic_block
4921 gimple_split_block (basic_block bb, void *stmt)
4922 {
4923 gimple_stmt_iterator gsi;
4924 gimple_stmt_iterator gsi_tgt;
4925 gimple act;
4926 gimple_seq list;
4927 basic_block new_bb;
4928 edge e;
4929 edge_iterator ei;
4930
4931 new_bb = create_empty_bb (bb);
4932
4933 /* Redirect the outgoing edges. */
4934 new_bb->succs = bb->succs;
4935 bb->succs = NULL;
4936 FOR_EACH_EDGE (e, ei, new_bb->succs)
4937 e->src = new_bb;
4938
4939 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
4940 stmt = NULL;
4941
4942 /* Move everything from GSI to the new basic block. */
4943 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4944 {
4945 act = gsi_stmt (gsi);
4946 if (gimple_code (act) == GIMPLE_LABEL)
4947 continue;
4948
4949 if (!stmt)
4950 break;
4951
4952 if (stmt == act)
4953 {
4954 gsi_next (&gsi);
4955 break;
4956 }
4957 }
4958
4959 if (gsi_end_p (gsi))
4960 return new_bb;
4961
4962 /* Split the statement list - avoid creating new containers as this
4963 brings ugly quadratic memory consumption in the inliner.
4964 (We are still quadratic since we need to update stmt BB pointers,
4965 sadly.) */
4966 list = gsi_split_seq_before (&gsi);
4967 set_bb_seq (new_bb, list);
4968 for (gsi_tgt = gsi_start (list);
4969 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
4970 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
4971
4972 return new_bb;
4973 }
4974
4975
4976 /* Moves basic block BB after block AFTER. */
4977
4978 static bool
4979 gimple_move_block_after (basic_block bb, basic_block after)
4980 {
4981 if (bb->prev_bb == after)
4982 return true;
4983
4984 unlink_block (bb);
4985 link_block (bb, after);
4986
4987 return true;
4988 }
4989
4990
4991 /* Return true if basic block BB can be duplicated. */
4992
4993 static bool
4994 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
4995 {
4996 return true;
4997 }
4998
4999 /* Create a duplicate of the basic block BB. NOTE: This does not
5000 preserve SSA form. */
5001
5002 static basic_block
5003 gimple_duplicate_bb (basic_block bb)
5004 {
5005 basic_block new_bb;
5006 gimple_stmt_iterator gsi, gsi_tgt;
5007 gimple_seq phis = phi_nodes (bb);
5008 gimple phi, stmt, copy;
5009
5010 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5011
5012 /* Copy the PHI nodes. We ignore PHI node arguments here because
5013 the incoming edges have not been set up yet. */
5014 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5015 {
5016 phi = gsi_stmt (gsi);
5017 copy = create_phi_node (gimple_phi_result (phi), new_bb);
5018 create_new_def_for (gimple_phi_result (copy), copy,
5019 gimple_phi_result_ptr (copy));
5020 }
5021
5022 gsi_tgt = gsi_start_bb (new_bb);
5023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5024 {
5025 def_operand_p def_p;
5026 ssa_op_iter op_iter;
5027
5028 stmt = gsi_stmt (gsi);
5029 if (gimple_code (stmt) == GIMPLE_LABEL)
5030 continue;
5031
5032 /* Create a new copy of STMT and duplicate STMT's virtual
5033 operands. */
5034 copy = gimple_copy (stmt);
5035 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5036
5037 maybe_duplicate_eh_stmt (copy, stmt);
5038 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5039
5040 /* Create new names for all the definitions created by COPY and
5041 add replacement mappings for each new name. */
5042 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5043 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5044 }
5045
5046 return new_bb;
5047 }
5048
5049 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5050
5051 static void
5052 add_phi_args_after_copy_edge (edge e_copy)
5053 {
5054 basic_block bb, bb_copy = e_copy->src, dest;
5055 edge e;
5056 edge_iterator ei;
5057 gimple phi, phi_copy;
5058 tree def;
5059 gimple_stmt_iterator psi, psi_copy;
5060
5061 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5062 return;
5063
5064 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5065
5066 if (e_copy->dest->flags & BB_DUPLICATED)
5067 dest = get_bb_original (e_copy->dest);
5068 else
5069 dest = e_copy->dest;
5070
5071 e = find_edge (bb, dest);
5072 if (!e)
5073 {
5074 /* During loop unrolling the target of the latch edge is copied.
5075 In this case we are not looking for the edge to DEST, but for
5076 the edge to the duplicated block whose original was DEST. */
5077 FOR_EACH_EDGE (e, ei, bb->succs)
5078 {
5079 if ((e->dest->flags & BB_DUPLICATED)
5080 && get_bb_original (e->dest) == dest)
5081 break;
5082 }
5083
5084 gcc_assert (e != NULL);
5085 }
5086
5087 for (psi = gsi_start_phis (e->dest),
5088 psi_copy = gsi_start_phis (e_copy->dest);
5089 !gsi_end_p (psi);
5090 gsi_next (&psi), gsi_next (&psi_copy))
5091 {
5092 phi = gsi_stmt (psi);
5093 phi_copy = gsi_stmt (psi_copy);
5094 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5095 add_phi_arg (phi_copy, def, e_copy,
5096 gimple_phi_arg_location_from_edge (phi, e));
5097 }
5098 }
5099
5100
5101 /* Basic block BB_COPY was created by code duplication. Add phi node
5102 arguments for edges going out of BB_COPY. The blocks that were
5103 duplicated have BB_DUPLICATED set. */
5104
5105 void
5106 add_phi_args_after_copy_bb (basic_block bb_copy)
5107 {
5108 edge e_copy;
5109 edge_iterator ei;
5110
5111 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5112 {
5113 add_phi_args_after_copy_edge (e_copy);
5114 }
5115 }
5116
5117 /* Blocks in REGION_COPY array of length N_REGION were created by
5118 duplication of basic blocks. Add phi node arguments for edges
5119 going from these blocks. If E_COPY is not NULL, also add
5120 phi node arguments for its destination. */
5121
5122 void
5123 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5124 edge e_copy)
5125 {
5126 unsigned i;
5127
5128 for (i = 0; i < n_region; i++)
5129 region_copy[i]->flags |= BB_DUPLICATED;
5130
5131 for (i = 0; i < n_region; i++)
5132 add_phi_args_after_copy_bb (region_copy[i]);
5133 if (e_copy)
5134 add_phi_args_after_copy_edge (e_copy);
5135
5136 for (i = 0; i < n_region; i++)
5137 region_copy[i]->flags &= ~BB_DUPLICATED;
5138 }
5139
5140 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5141 important exit edge EXIT. By important we mean that no SSA name defined
5142 inside the region is live over the other exit edges of the region. All entry
5143 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5144 to the duplicate of the region. SSA form, dominance and loop information
5145 is updated. The new basic blocks are stored to REGION_COPY in the same
5146 order as they had in REGION, provided that REGION_COPY is not NULL.
5147 The function returns false if it is unable to copy the region,
5148 true otherwise. */
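/* For the primary use, loop header copying, the effect is roughly the
   following source-level transformation (a sketch only):

     while (cond)
       body;

   becomes

     if (cond)
       {
         body;
         while (cond)
           body;
       }
*/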
5149
5150 bool
5151 gimple_duplicate_sese_region (edge entry, edge exit,
5152 basic_block *region, unsigned n_region,
5153 basic_block *region_copy)
5154 {
5155 unsigned i;
5156 bool free_region_copy = false, copying_header = false;
5157 struct loop *loop = entry->dest->loop_father;
5158 edge exit_copy;
5159 VEC (basic_block, heap) *doms;
5160 edge redirected;
5161 int total_freq = 0, entry_freq = 0;
5162 gcov_type total_count = 0, entry_count = 0;
5163
5164 if (!can_copy_bbs_p (region, n_region))
5165 return false;
5166
5167 /* Some sanity checking. Note that we do not check for all possible
5168 misuses of the functions. That is, if you ask to copy something weird,
5169 it will work, but the state of structures probably will not be
5170 correct. */
5171 for (i = 0; i < n_region; i++)
5172 {
5173 /* We do not handle subloops, i.e. all the blocks must belong to the
5174 same loop. */
5175 if (region[i]->loop_father != loop)
5176 return false;
5177
5178 if (region[i] != entry->dest
5179 && region[i] == loop->header)
5180 return false;
5181 }
5182
5183 set_loop_copy (loop, loop);
5184
5185 /* In case the function is used for loop header copying (which is the primary
5186 use), ensure that EXIT and its copy will become the new latch and entry edges. */
5187 if (loop->header == entry->dest)
5188 {
5189 copying_header = true;
5190 set_loop_copy (loop, loop_outer (loop));
5191
5192 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5193 return false;
5194
5195 for (i = 0; i < n_region; i++)
5196 if (region[i] != exit->src
5197 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5198 return false;
5199 }
5200
5201 if (!region_copy)
5202 {
5203 region_copy = XNEWVEC (basic_block, n_region);
5204 free_region_copy = true;
5205 }
5206
5207 gcc_assert (!need_ssa_update_p (cfun));
5208
5209 /* Record blocks outside the region that are dominated by something
5210 inside. */
5211 doms = NULL;
5212 initialize_original_copy_tables ();
5213
5214 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5215
5216 if (entry->dest->count)
5217 {
5218 total_count = entry->dest->count;
5219 entry_count = entry->count;
5220 /* Fix up corner cases, to avoid division by zero or creation of negative
5221 frequencies. */
5222 if (entry_count > total_count)
5223 entry_count = total_count;
5224 }
5225 else
5226 {
5227 total_freq = entry->dest->frequency;
5228 entry_freq = EDGE_FREQUENCY (entry);
5229 /* Fix up corner cases, to avoid division by zero or creation of negative
5230 frequencies. */
5231 if (total_freq == 0)
5232 total_freq = 1;
5233 else if (entry_freq > total_freq)
5234 entry_freq = total_freq;
5235 }
5236
5237 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5238 split_edge_bb_loc (entry));
5239 if (total_count)
5240 {
5241 scale_bbs_frequencies_gcov_type (region, n_region,
5242 total_count - entry_count,
5243 total_count);
5244 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5245 total_count);
5246 }
5247 else
5248 {
5249 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5250 total_freq);
5251 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5252 }
5253
5254 if (copying_header)
5255 {
5256 loop->header = exit->dest;
5257 loop->latch = exit->src;
5258 }
5259
5260 /* Redirect the entry and add the phi node arguments. */
5261 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5262 gcc_assert (redirected != NULL);
5263 flush_pending_stmts (entry);
5264
5265 /* Concerning updating of dominators: We must recount dominators
5266 for entry block and its copy. Anything that is outside of the
5267 region, but was dominated by something inside needs recounting as
5268 well. */
5269 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5270 VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
5271 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5272 VEC_free (basic_block, heap, doms);
5273
5274 /* Add the other PHI node arguments. */
5275 add_phi_args_after_copy (region_copy, n_region, NULL);
5276
5277 /* Update the SSA web. */
5278 update_ssa (TODO_update_ssa);
5279
5280 if (free_region_copy)
5281 free (region_copy);
5282
5283 free_original_copy_tables ();
5284 return true;
5285 }
5286
5287 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5288 are stored to REGION_COPY in the same order in which they appear
5289 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5290 the region, EXIT an exit from it. The condition guarding EXIT
5291 is moved to ENTRY. Returns true if duplication succeeds, false
5292 otherwise.
5293
5294 For example,
5295
5296 some_code;
5297 if (cond)
5298 A;
5299 else
5300 B;
5301
5302 is transformed to
5303
5304 if (cond)
5305 {
5306 some_code;
5307 A;
5308 }
5309 else
5310 {
5311 some_code;
5312 B;
5313 }
5314 */
5315
5316 bool
5317 gimple_duplicate_sese_tail (edge entry, edge exit,
5318 basic_block *region, unsigned n_region,
5319 basic_block *region_copy)
5320 {
5321 unsigned i;
5322 bool free_region_copy = false;
5323 struct loop *loop = exit->dest->loop_father;
5324 struct loop *orig_loop = entry->dest->loop_father;
5325 basic_block switch_bb, entry_bb, nentry_bb;
5326 VEC (basic_block, heap) *doms;
5327 int total_freq = 0, exit_freq = 0;
5328 gcov_type total_count = 0, exit_count = 0;
5329 edge exits[2], nexits[2], e;
5330 gimple_stmt_iterator gsi, gsi1;
5331 gimple cond_stmt;
5332 edge sorig, snew;
5333 basic_block exit_bb;
5334 basic_block iters_bb;
5335 tree new_rhs;
5336 gimple_stmt_iterator psi;
5337 gimple phi;
5338 tree def;
5339
5340 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5341 exits[0] = exit;
5342 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5343
5344 if (!can_copy_bbs_p (region, n_region))
5345 return false;
5346
5347 initialize_original_copy_tables ();
5348 set_loop_copy (orig_loop, loop);
5349 duplicate_subloops (orig_loop, loop);
5350
5351 if (!region_copy)
5352 {
5353 region_copy = XNEWVEC (basic_block, n_region);
5354 free_region_copy = true;
5355 }
5356
5357 gcc_assert (!need_ssa_update_p (cfun));
5358
5359 /* Record blocks outside the region that are dominated by something
5360 inside. */
5361 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5362
5363 if (exit->src->count)
5364 {
5365 total_count = exit->src->count;
5366 exit_count = exit->count;
5367 /* Fix up corner cases, to avoid division by zero or creation of negative
5368 frequencies. */
5369 if (exit_count > total_count)
5370 exit_count = total_count;
5371 }
5372 else
5373 {
5374 total_freq = exit->src->frequency;
5375 exit_freq = EDGE_FREQUENCY (exit);
5376 /* Fix up corner cases, to avoid division by zero or creation of negative
5377 frequencies. */
5378 if (total_freq == 0)
5379 total_freq = 1;
5380 if (exit_freq > total_freq)
5381 exit_freq = total_freq;
5382 }
5383
5384 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5385 split_edge_bb_loc (exit));
5386 if (total_count)
5387 {
5388 scale_bbs_frequencies_gcov_type (region, n_region,
5389 total_count - exit_count,
5390 total_count);
5391 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5392 total_count);
5393 }
5394 else
5395 {
5396 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5397 total_freq);
5398 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5399 }
5400
5401 /* Create the switch block, and put the exit condition into it. */
5402 entry_bb = entry->dest;
5403 nentry_bb = get_bb_copy (entry_bb);
5404 if (!last_stmt (entry->src)
5405 || !stmt_ends_bb_p (last_stmt (entry->src)))
5406 switch_bb = entry->src;
5407 else
5408 switch_bb = split_edge (entry);
5409 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5410
5411 gsi = gsi_last_bb (switch_bb);
5412 cond_stmt = last_stmt (exit->src);
5413 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5414 cond_stmt = gimple_copy (cond_stmt);
5415
5416 /* If the block containing the exit condition has the latch as
5417 successor, then the body of the loop is executed before
5418 the exit condition is tested. In that case, moving the
5419 condition to the entry makes the loop iterate one time
5420 less (which is the wanted outcome, since we peel out the
5421 last iteration). If the body is executed after the
5422 condition, moving the condition to the entry requires
5423 decrementing the iteration count by one. */
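      /* A sketch with hypothetical names: when peeling the last
	 iteration of "while (i_1 < n_2) body;", the body runs after
	 the test, so the guard copied to the entry must compare
	 against n_2 - 1; the MINUS_EXPR below builds that value.  */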
5424 if (exits[1]->dest == orig_loop->latch)
5425 new_rhs = gimple_cond_rhs (cond_stmt);
5426 else
5427 {
5428 new_rhs = fold_build2 (MINUS_EXPR, TREE_TYPE (gimple_cond_rhs (cond_stmt)),
5429 gimple_cond_rhs (cond_stmt),
5430 build_int_cst (TREE_TYPE (gimple_cond_rhs (cond_stmt)), 1));
5431
5432 if (TREE_CODE (gimple_cond_rhs (cond_stmt)) == SSA_NAME)
5433 {
5434 iters_bb = gimple_bb (SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)));
5435 for (gsi1 = gsi_start_bb (iters_bb); !gsi_end_p (gsi1); gsi_next (&gsi1))
5436 if (gsi_stmt (gsi1) == SSA_NAME_DEF_STMT (gimple_cond_rhs (cond_stmt)))
5437 break;
5438
5439 new_rhs = force_gimple_operand_gsi (&gsi1, new_rhs, true,
5440 NULL_TREE, false, GSI_CONTINUE_LINKING);
5441 }
5442 }
5443 gimple_cond_set_rhs (cond_stmt, unshare_expr (new_rhs));
5444 gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
5445 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5446
5447 sorig = single_succ_edge (switch_bb);
5448 sorig->flags = exits[1]->flags;
5449 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5450
5451 /* Register the new edge from SWITCH_BB in loop exit lists. */
5452 rescan_loop_exit (snew, true, false);
5453
5454 /* Add the PHI node arguments. */
5455 add_phi_args_after_copy (region_copy, n_region, snew);
5456
5457 /* Get rid of now superfluous conditions and associated edges (and phi node
5458 arguments). */
5459 exit_bb = exit->dest;
5460
5461 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5462 PENDING_STMT (e) = NULL;
5463
5464 /* The latch of ORIG_LOOP was copied, and so was the backedge
5465 to the original header. We redirect this backedge to EXIT_BB. */
5466 for (i = 0; i < n_region; i++)
5467 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5468 {
5469 gcc_assert (single_succ_edge (region_copy[i]));
5470 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
5471 PENDING_STMT (e) = NULL;
5472 for (psi = gsi_start_phis (exit_bb);
5473 !gsi_end_p (psi);
5474 gsi_next (&psi))
5475 {
5476 phi = gsi_stmt (psi);
5477 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
5478 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
5479 }
5480 }
5481 e = redirect_edge_and_branch (nexits[0], nexits[1]->dest);
5482 PENDING_STMT (e) = NULL;
5483
5484 /* Anything that is outside of the region, but was dominated by something
5485 inside needs to update dominance info. */
5486 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5487 VEC_free (basic_block, heap, doms);
5488 /* Update the SSA web. */
5489 update_ssa (TODO_update_ssa);
5490
5491 if (free_region_copy)
5492 free (region_copy);
5493
5494 free_original_copy_tables ();
5495 return true;
5496 }
5497
5498 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
5499 adding blocks when the dominator traversal reaches EXIT. This
5500 function silently assumes that ENTRY strictly dominates EXIT. */
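/* Typical use, as in move_sese_region_to_fn below: seed the vector
   with the entry block, then collect the rest of the region:

     VEC_safe_push (basic_block, heap, bbs, entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
*/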
5501
5502 void
5503 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
5504 VEC(basic_block,heap) **bbs_p)
5505 {
5506 basic_block son;
5507
5508 for (son = first_dom_son (CDI_DOMINATORS, entry);
5509 son;
5510 son = next_dom_son (CDI_DOMINATORS, son))
5511 {
5512 VEC_safe_push (basic_block, heap, *bbs_p, son);
5513 if (son != exit)
5514 gather_blocks_in_sese_region (son, exit, bbs_p);
5515 }
5516 }
5517
5518 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
5519 The duplicates are recorded in VARS_MAP. */
5520
5521 static void
5522 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
5523 tree to_context)
5524 {
5525 tree t = *tp, new_t;
5526 struct function *f = DECL_STRUCT_FUNCTION (to_context);
5527 void **loc;
5528
5529 if (DECL_CONTEXT (t) == to_context)
5530 return;
5531
5532 loc = pointer_map_contains (vars_map, t);
5533
5534 if (!loc)
5535 {
5536 loc = pointer_map_insert (vars_map, t);
5537
5538 if (SSA_VAR_P (t))
5539 {
5540 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
5541 f->local_decls = tree_cons (NULL_TREE, new_t, f->local_decls);
5542 }
5543 else
5544 {
5545 gcc_assert (TREE_CODE (t) == CONST_DECL);
5546 new_t = copy_node (t);
5547 }
5548 DECL_CONTEXT (new_t) = to_context;
5549
5550 *loc = new_t;
5551 }
5552 else
5553 new_t = (tree) *loc;
5554
5555 *tp = new_t;
5556 }
5557
5558
5559 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
5560 VARS_MAP maps old ssa names and var_decls to the new ones. */
5561
5562 static tree
5563 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
5564 tree to_context)
5565 {
5566 void **loc;
5567 tree new_name, decl = SSA_NAME_VAR (name);
5568
5569 gcc_assert (is_gimple_reg (name));
5570
5571 loc = pointer_map_contains (vars_map, name);
5572
5573 if (!loc)
5574 {
5575 replace_by_duplicate_decl (&decl, vars_map, to_context);
5576
5577 push_cfun (DECL_STRUCT_FUNCTION (to_context));
5578 if (gimple_in_ssa_p (cfun))
5579 add_referenced_var (decl);
5580
5581 new_name = make_ssa_name (decl, SSA_NAME_DEF_STMT (name));
5582 if (SSA_NAME_IS_DEFAULT_DEF (name))
5583 set_default_def (decl, new_name);
5584 pop_cfun ();
5585
5586 loc = pointer_map_insert (vars_map, name);
5587 *loc = new_name;
5588 }
5589 else
5590 new_name = (tree) *loc;
5591
5592 return new_name;
5593 }
5594
5595 struct move_stmt_d
5596 {
5597 tree orig_block;
5598 tree new_block;
5599 tree from_context;
5600 tree to_context;
5601 struct pointer_map_t *vars_map;
5602 htab_t new_label_map;
5603 struct pointer_map_t *eh_map;
5604 bool remap_decls_p;
5605 };
5606
5607 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
5608 contained in *TP if it was previously set to ORIG_BLOCK, and change the
5609 DECL_CONTEXT of every local variable referenced in *TP. */
5610
5611 static tree
5612 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
5613 {
5614 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5615 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5616 tree t = *tp;
5617
5618 if (EXPR_P (t))
5619 /* We should never have TREE_BLOCK set on non-statements. */
5620 gcc_assert (!TREE_BLOCK (t));
5621
5622 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
5623 {
5624 if (TREE_CODE (t) == SSA_NAME)
5625 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
5626 else if (TREE_CODE (t) == LABEL_DECL)
5627 {
5628 if (p->new_label_map)
5629 {
5630 struct tree_map in, *out;
5631 in.base.from = t;
5632 out = (struct tree_map *)
5633 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
5634 if (out)
5635 *tp = t = out->to;
5636 }
5637
5638 DECL_CONTEXT (t) = p->to_context;
5639 }
5640 else if (p->remap_decls_p)
5641 {
5642 /* Replace T with its duplicate. T should no longer appear in the
5643 parent function, so this looks wasteful; however, it may appear
5644 in referenced_vars, and more importantly, as virtual operands of
5645 statements, and in alias lists of other variables. It would be
5646 quite difficult to expunge it from all those places. ??? It might
5647 suffice to do this for addressable variables. */
5648 if ((TREE_CODE (t) == VAR_DECL
5649 && !is_global_var (t))
5650 || TREE_CODE (t) == CONST_DECL)
5651 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
5652
5653 if (SSA_VAR_P (t)
5654 && gimple_in_ssa_p (cfun))
5655 {
5656 push_cfun (DECL_STRUCT_FUNCTION (p->to_context));
5657 add_referenced_var (*tp);
5658 pop_cfun ();
5659 }
5660 }
5661 *walk_subtrees = 0;
5662 }
5663 else if (TYPE_P (t))
5664 *walk_subtrees = 0;
5665
5666 return NULL_TREE;
5667 }
5668
5669 /* Helper for move_stmt_r. Given an EH region number for the source
5670 function, map that to the duplicate EH region number in the dest. */
5671
5672 static int
5673 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
5674 {
5675 eh_region old_r, new_r;
5676 void **slot;
5677
5678 old_r = get_eh_region_from_number (old_nr);
5679 slot = pointer_map_contains (p->eh_map, old_r);
5680 new_r = (eh_region) *slot;
5681
5682 return new_r->index;
5683 }
5684
5685 /* Similar, but operate on INTEGER_CSTs. */
5686
5687 static tree
5688 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
5689 {
5690 int old_nr, new_nr;
5691
5692 old_nr = tree_low_cst (old_t_nr, 0);
5693 new_nr = move_stmt_eh_region_nr (old_nr, p);
5694
5695 return build_int_cst (NULL, new_nr);
5696 }
5697
5698 /* Like move_stmt_op, but for gimple statements.
5699
5700 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
5701 contained in the current statement in *GSI_P and change the
5702 DECL_CONTEXT of every local variable referenced in the current
5703 statement. */
5704
5705 static tree
5706 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
5707 struct walk_stmt_info *wi)
5708 {
5709 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
5710 gimple stmt = gsi_stmt (*gsi_p);
5711 tree block = gimple_block (stmt);
5712
5713 if (p->orig_block == NULL_TREE
5714 || block == p->orig_block
5715 || block == NULL_TREE)
5716 gimple_set_block (stmt, p->new_block);
5717 #ifdef ENABLE_CHECKING
5718 else if (block != p->new_block)
5719 {
5720 while (block && block != p->orig_block)
5721 block = BLOCK_SUPERCONTEXT (block);
5722 gcc_assert (block);
5723 }
5724 #endif
5725
5726 switch (gimple_code (stmt))
5727 {
5728 case GIMPLE_CALL:
5729 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
5730 {
5731 tree r, fndecl = gimple_call_fndecl (stmt);
5732 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
5733 switch (DECL_FUNCTION_CODE (fndecl))
5734 {
5735 case BUILT_IN_EH_COPY_VALUES:
5736 r = gimple_call_arg (stmt, 1);
5737 r = move_stmt_eh_region_tree_nr (r, p);
5738 gimple_call_set_arg (stmt, 1, r);
5739 /* FALLTHRU */
5740
5741 case BUILT_IN_EH_POINTER:
5742 case BUILT_IN_EH_FILTER:
5743 r = gimple_call_arg (stmt, 0);
5744 r = move_stmt_eh_region_tree_nr (r, p);
5745 gimple_call_set_arg (stmt, 0, r);
5746 break;
5747
5748 default:
5749 break;
5750 }
5751 }
5752 break;
5753
5754 case GIMPLE_RESX:
5755 {
5756 int r = gimple_resx_region (stmt);
5757 r = move_stmt_eh_region_nr (r, p);
5758 gimple_resx_set_region (stmt, r);
5759 }
5760 break;
5761
5762 case GIMPLE_EH_DISPATCH:
5763 {
5764 int r = gimple_eh_dispatch_region (stmt);
5765 r = move_stmt_eh_region_nr (r, p);
5766 gimple_eh_dispatch_set_region (stmt, r);
5767 }
5768 break;
5769
5770 case GIMPLE_OMP_RETURN:
5771 case GIMPLE_OMP_CONTINUE:
5772 break;
5773 default:
5774 if (is_gimple_omp (stmt))
5775 {
5776 /* Do not remap variables inside OMP directives. Variables
5777 referenced in clauses and directive header belong to the
5778 parent function and should not be moved into the child
5779 function. */
5780 bool save_remap_decls_p = p->remap_decls_p;
5781 p->remap_decls_p = false;
5782 *handled_ops_p = true;
5783
5784 walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r,
5785 move_stmt_op, wi);
5786
5787 p->remap_decls_p = save_remap_decls_p;
5788 }
5789 break;
5790 }
5791
5792 return NULL_TREE;
5793 }
5794
5795 /* Marks virtual operands of all statements in basic block BB for
5796 renaming. */
5797
5798 void
5799 mark_virtual_ops_in_bb (basic_block bb)
5800 {
5801 gimple_stmt_iterator gsi;
5802
5803 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5804 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5805
5806 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5807 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
5808 }
5809
5810 /* Move basic block BB from function CFUN to function DEST_FN. The
5811 block is moved out of the original linked list and placed after
5812 block AFTER in the new list. Also, the block is removed from the
5813 original array of blocks and placed in DEST_FN's array of blocks.
5814 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
5815 updated to reflect the moved edges.
5816
5817 The local variables are remapped to new instances, VARS_MAP is used
5818 to record the mapping. */
5819
5820 static void
5821 move_block_to_fn (struct function *dest_cfun, basic_block bb,
5822 basic_block after, bool update_edge_count_p,
5823 struct move_stmt_d *d)
5824 {
5825 struct control_flow_graph *cfg;
5826 edge_iterator ei;
5827 edge e;
5828 gimple_stmt_iterator si;
5829 unsigned old_len, new_len;
5830
5831 /* Remove BB from dominance structures. */
5832 delete_from_dominance_info (CDI_DOMINATORS, bb);
5833 if (current_loops)
5834 remove_bb_from_loops (bb);
5835
5836 /* Link BB to the new linked list. */
5837 move_block_after (bb, after);
5838
5839 /* Update the edge count in the corresponding flowgraphs. */
5840 if (update_edge_count_p)
5841 FOR_EACH_EDGE (e, ei, bb->succs)
5842 {
5843 cfun->cfg->x_n_edges--;
5844 dest_cfun->cfg->x_n_edges++;
5845 }
5846
5847 /* Remove BB from the original basic block array. */
5848 VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
5849 cfun->cfg->x_n_basic_blocks--;
5850
5851 /* Grow DEST_CFUN's basic block array if needed. */
5852 cfg = dest_cfun->cfg;
5853 cfg->x_n_basic_blocks++;
5854 if (bb->index >= cfg->x_last_basic_block)
5855 cfg->x_last_basic_block = bb->index + 1;
5856
5857 old_len = VEC_length (basic_block, cfg->x_basic_block_info);
5858 if ((unsigned) cfg->x_last_basic_block >= old_len)
5859 {
5860 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
5861 VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
5862 new_len);
5863 }
5864
5865 VEC_replace (basic_block, cfg->x_basic_block_info,
5866 bb->index, bb);
5867
5868 /* Remap the variables in phi nodes. */
5869 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
5870 {
5871 gimple phi = gsi_stmt (si);
5872 use_operand_p use;
5873 tree op = PHI_RESULT (phi);
5874 ssa_op_iter oi;
5875
5876 if (!is_gimple_reg (op))
5877 {
5878 /* Remove the phi nodes for virtual operands (alias analysis will be
5879 run for the new function, anyway). */
5880 remove_phi_node (&si, true);
5881 continue;
5882 }
5883
5884 SET_PHI_RESULT (phi,
5885 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5886 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
5887 {
5888 op = USE_FROM_PTR (use);
5889 if (TREE_CODE (op) == SSA_NAME)
5890 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
5891 }
5892
5893 gsi_next (&si);
5894 }
5895
5896 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5897 {
5898 gimple stmt = gsi_stmt (si);
5899 struct walk_stmt_info wi;
5900
5901 memset (&wi, 0, sizeof (wi));
5902 wi.info = d;
5903 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
5904
5905 if (gimple_code (stmt) == GIMPLE_LABEL)
5906 {
5907 tree label = gimple_label_label (stmt);
5908 int uid = LABEL_DECL_UID (label);
5909
5910 gcc_assert (uid > -1);
5911
5912 old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
5913 if (old_len <= (unsigned) uid)
5914 {
5915 new_len = 3 * uid / 2 + 1;
5916 VEC_safe_grow_cleared (basic_block, gc,
5917 cfg->x_label_to_block_map, new_len);
5918 }
5919
5920 VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
5921 VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
5922
5923 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
5924
5925 if (uid >= dest_cfun->cfg->last_label_uid)
5926 dest_cfun->cfg->last_label_uid = uid + 1;
5927 }
5928
5929 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
5930 remove_stmt_from_eh_lp_fn (cfun, stmt);
5931
5932 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
5933 gimple_remove_stmt_histograms (cfun, stmt);
5934
5935 /* We cannot leave any operands allocated from the operand caches of
5936 the current function. */
5937 free_stmt_operands (stmt);
5938 push_cfun (dest_cfun);
5939 update_stmt (stmt);
5940 pop_cfun ();
5941 }
5942
5943 FOR_EACH_EDGE (e, ei, bb->succs)
5944 if (e->goto_locus)
5945 {
5946 tree block = e->goto_block;
5947 if (d->orig_block == NULL_TREE
5948 || block == d->orig_block)
5949 e->goto_block = d->new_block;
5950 #ifdef ENABLE_CHECKING
5951 else if (block != d->new_block)
5952 {
5953 while (block && block != d->orig_block)
5954 block = BLOCK_SUPERCONTEXT (block);
5955 gcc_assert (block);
5956 }
5957 #endif
5958 }
5959 }
5960
5961 /* Examine the statements in BB (which is in SRC_CFUN); find and return
5962 the outermost EH region. Use REGION as the incoming base EH region. */
5963
5964 static eh_region
5965 find_outermost_region_in_block (struct function *src_cfun,
5966 basic_block bb, eh_region region)
5967 {
5968 gimple_stmt_iterator si;
5969
5970 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5971 {
5972 gimple stmt = gsi_stmt (si);
5973 eh_region stmt_region;
5974 int lp_nr;
5975
5976 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
5977 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
5978 if (stmt_region)
5979 {
5980 if (region == NULL)
5981 region = stmt_region;
5982 else if (stmt_region != region)
5983 {
5984 region = eh_region_outermost (src_cfun, stmt_region, region);
5985 gcc_assert (region != NULL);
5986 }
5987 }
5988 }
5989
5990 return region;
5991 }
5992
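/* Create an artificial label in the destination function to stand for
   DECL, and record the mapping in the hash table DATA.  Used as the
   label mapper when duplicating EH regions into the new function.  */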
5993 static tree
5994 new_label_mapper (tree decl, void *data)
5995 {
5996 htab_t hash = (htab_t) data;
5997 struct tree_map *m;
5998 void **slot;
5999
6000 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6001
6002 m = XNEW (struct tree_map);
6003 m->hash = DECL_UID (decl);
6004 m->base.from = decl;
6005 m->to = create_artificial_label (UNKNOWN_LOCATION);
6006 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6007 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6008 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6009
6010 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6011 gcc_assert (*slot == NULL);
6012
6013 *slot = m;
6014
6015 return m->to;
6016 }
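
/* Usage sketch (mirrors the call in move_sese_region_to_fn below):
   NEW_LABEL_MAPPER is passed to duplicate_eh_regions as a callback,
   with the hash table as its DATA argument:

     htab_t map = htab_create (17, tree_map_hash, tree_map_eq, free);
     eh_map = duplicate_eh_regions (saved_cfun, region, 0,
                                    new_label_mapper, map);

   Every LABEL_DECL met while copying the EH tree is then replaced by
   the fresh artificial label created above.  */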
6017
6018 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6019 subblocks. */
6020
6021 static void
6022 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6023 tree to_context)
6024 {
6025 tree *tp, t;
6026
6027 for (tp = &BLOCK_VARS (block); *tp; tp = &TREE_CHAIN (*tp))
6028 {
6029 t = *tp;
6030 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6031 continue;
6032 replace_by_duplicate_decl (&t, vars_map, to_context);
6033 if (t != *tp)
6034 {
6035 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6036 {
6037 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6038 DECL_HAS_VALUE_EXPR_P (t) = 1;
6039 }
6040 TREE_CHAIN (t) = TREE_CHAIN (*tp);
6041 *tp = t;
6042 }
6043 }
6044
6045 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6046 replace_block_vars_by_duplicates (block, vars_map, to_context);
6047 }
6048
6049 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6050 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6051 single basic block in the original CFG and the new basic block is
6052 returned. DEST_CFUN must not have a CFG yet.
6053
6054 Note that the region need not be a pure SESE region. Blocks inside
6055 the region may contain calls to abort/exit. The only restriction
6056 is that ENTRY_BB should be the only entry point and it must
6057 dominate EXIT_BB.
6058
6059 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6060 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6061 to the new function.
6062
6063 All local variables referenced in the region are assumed to be in
6064 the corresponding BLOCK_VARS and unexpanded variable lists
6065 associated with DEST_CFUN. */
6066
6067 basic_block
6068 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6069 basic_block exit_bb, tree orig_block)
6070 {
6071 VEC(basic_block,heap) *bbs, *dom_bbs;
6072 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6073 basic_block after, bb, *entry_pred, *exit_succ, abb;
6074 struct function *saved_cfun = cfun;
6075 int *entry_flag, *exit_flag;
6076 unsigned *entry_prob, *exit_prob;
6077 unsigned i, num_entry_edges, num_exit_edges;
6078 edge e;
6079 edge_iterator ei;
6080 htab_t new_label_map;
6081 struct pointer_map_t *vars_map, *eh_map;
6082 struct loop *loop = entry_bb->loop_father;
6083 struct move_stmt_d d;
6084
6085 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6086 region. */
6087 gcc_assert (entry_bb != exit_bb
6088 && (!exit_bb
6089 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6090
6091 /* Collect all the blocks in the region. Manually add ENTRY_BB
6092 because it won't be added by dfs_enumerate_from. */
6093 bbs = NULL;
6094 VEC_safe_push (basic_block, heap, bbs, entry_bb);
6095 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6096
6097 /* The blocks that used to be dominated by something in BBS will now be
6098 dominated by the new block. */
6099 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6100 VEC_address (basic_block, bbs),
6101 VEC_length (basic_block, bbs));
6102
6103 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6104 the predecessor edges to ENTRY_BB and the successor edges to
6105 EXIT_BB so that we can re-attach them to the new basic block that
6106 will replace the region. */
6107 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6108 entry_pred = (basic_block *) xcalloc (num_entry_edges, sizeof (basic_block));
6109 entry_flag = (int *) xcalloc (num_entry_edges, sizeof (int));
6110 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6111 i = 0;
6112 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6113 {
6114 entry_prob[i] = e->probability;
6115 entry_flag[i] = e->flags;
6116 entry_pred[i++] = e->src;
6117 remove_edge (e);
6118 }
6119
6120 if (exit_bb)
6121 {
6122 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6123 exit_succ = (basic_block *) xcalloc (num_exit_edges,
6124 sizeof (basic_block));
6125 exit_flag = (int *) xcalloc (num_exit_edges, sizeof (int));
6126 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6127 i = 0;
6128 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6129 {
6130 exit_prob[i] = e->probability;
6131 exit_flag[i] = e->flags;
6132 exit_succ[i++] = e->dest;
6133 remove_edge (e);
6134 }
6135 }
6136 else
6137 {
6138 num_exit_edges = 0;
6139 exit_succ = NULL;
6140 exit_flag = NULL;
6141 exit_prob = NULL;
6142 }
6143
6144 /* Switch context to the child function to initialize DEST_FN's CFG. */
6145 gcc_assert (dest_cfun->cfg == NULL);
6146 push_cfun (dest_cfun);
6147
6148 init_empty_tree_cfg ();
6149
6150 /* Initialize EH information for the new function. */
6151 eh_map = NULL;
6152 new_label_map = NULL;
6153 if (saved_cfun->eh)
6154 {
6155 eh_region region = NULL;
6156
6157 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6158 region = find_outermost_region_in_block (saved_cfun, bb, region);
6159
6160 init_eh_for_function ();
6161 if (region != NULL)
6162 {
6163 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6164 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6165 new_label_mapper, new_label_map);
6166 }
6167 }
6168
6169 pop_cfun ();
6170
6171 /* Move blocks from BBS into DEST_CFUN. */
6172 gcc_assert (VEC_length (basic_block, bbs) >= 2);
6173 after = dest_cfun->cfg->x_entry_block_ptr;
6174 vars_map = pointer_map_create ();
6175
6176 memset (&d, 0, sizeof (d));
6177 d.orig_block = orig_block;
6178 d.new_block = DECL_INITIAL (dest_cfun->decl);
6179 d.from_context = cfun->decl;
6180 d.to_context = dest_cfun->decl;
6181 d.vars_map = vars_map;
6182 d.new_label_map = new_label_map;
6183 d.eh_map = eh_map;
6184 d.remap_decls_p = true;
6185
6186 for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
6187 {
6188 /* No need to update edge counts on the last block. It has
6189 already been updated earlier when we detached the region from
6190 the original CFG. */
6191 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6192 after = bb;
6193 }
6194
6195 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6196 if (orig_block)
6197 {
6198 tree block;
6199 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6200 == NULL_TREE);
6201 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6202 = BLOCK_SUBBLOCKS (orig_block);
6203 for (block = BLOCK_SUBBLOCKS (orig_block);
6204 block; block = BLOCK_CHAIN (block))
6205 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6206 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6207 }
6208
6209 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6210 vars_map, dest_cfun->decl);
6211
6212 if (new_label_map)
6213 htab_delete (new_label_map);
6214 if (eh_map)
6215 pointer_map_destroy (eh_map);
6216 pointer_map_destroy (vars_map);
6217
6218 /* Rewire the entry and exit blocks. The successor to the entry
6219 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6220 the child function. Similarly, the predecessor of DEST_FN's
6221 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6222 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6223 various CFG manipulation functions get to the right CFG.
6224
6225 FIXME, this is silly. The CFG ought to become a parameter to
6226 these helpers. */
6227 push_cfun (dest_cfun);
6228 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6229 if (exit_bb)
6230 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6231 pop_cfun ();
6232
6233 /* Back in the original function, the SESE region has disappeared,
6234 create a new basic block in its place. */
6235 bb = create_empty_bb (entry_pred[0]);
6236 if (current_loops)
6237 add_bb_to_loop (bb, loop);
6238 for (i = 0; i < num_entry_edges; i++)
6239 {
6240 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6241 e->probability = entry_prob[i];
6242 }
6243
6244 for (i = 0; i < num_exit_edges; i++)
6245 {
6246 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6247 e->probability = exit_prob[i];
6248 }
6249
6250 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6251 for (i = 0; VEC_iterate (basic_block, dom_bbs, i, abb); i++)
6252 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6253 VEC_free (basic_block, heap, dom_bbs);
6254
6255 if (exit_bb)
6256 {
6257 free (exit_prob);
6258 free (exit_flag);
6259 free (exit_succ);
6260 }
6261 free (entry_prob);
6262 free (entry_flag);
6263 free (entry_pred);
6264 VEC_free (basic_block, heap, bbs);
6265
6266 return bb;
6267 }
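
/* Usage sketch (hypothetical caller with illustrative names; an
   outliner such as the OpenMP expander uses this to move a region
   into a freshly created child function):

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block bb;

     gcc_assert (child_cfun->cfg == NULL);
     bb = move_sese_region_to_fn (child_cfun, region_entry, region_exit,
                                  outer_block);

   On return, BB is the single block that replaces the region in the
   current function; the region's blocks now belong to CHILD_CFUN.  */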
6268
6269
6270 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in
6271 tree-pass.h).  */
6272
6273 void
6274 dump_function_to_file (tree fn, FILE *file, int flags)
6275 {
6276 tree arg, vars, var;
6277 struct function *dsf;
6278 bool ignore_topmost_bind = false, any_var = false;
6279 basic_block bb;
6280 tree chain;
6281
6282 fprintf (file, "%s (", lang_hooks.decl_printable_name (fn, 2));
6283
6284 arg = DECL_ARGUMENTS (fn);
6285 while (arg)
6286 {
6287 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6288 fprintf (file, " ");
6289 print_generic_expr (file, arg, dump_flags);
6290 if (flags & TDF_VERBOSE)
6291 print_node (file, "", arg, 4);
6292 if (TREE_CHAIN (arg))
6293 fprintf (file, ", ");
6294 arg = TREE_CHAIN (arg);
6295 }
6296 fprintf (file, ")\n");
6297
6298 if (flags & TDF_VERBOSE)
6299 print_node (file, "", fn, 2);
6300
6301 dsf = DECL_STRUCT_FUNCTION (fn);
6302 if (dsf && (flags & TDF_EH))
6303 dump_eh_tree (file, dsf);
6304
6305 if (flags & TDF_RAW && !gimple_has_body_p (fn))
6306 {
6307 dump_node (fn, TDF_SLIM | flags, file);
6308 return;
6309 }
6310
6311 /* Switch CFUN to point to FN. */
6312 push_cfun (DECL_STRUCT_FUNCTION (fn));
6313
6314 /* When GIMPLE is lowered, the variables are no longer available in
6315 BIND_EXPRs, so display them separately. */
6316 if (cfun && cfun->decl == fn && cfun->local_decls)
6317 {
6318 ignore_topmost_bind = true;
6319
6320 fprintf (file, "{\n");
6321 for (vars = cfun->local_decls; vars; vars = TREE_CHAIN (vars))
6322 {
6323 var = TREE_VALUE (vars);
6324
6325 print_generic_decl (file, var, flags);
6326 if (flags & TDF_VERBOSE)
6327 print_node (file, "", var, 4);
6328 fprintf (file, "\n");
6329
6330 any_var = true;
6331 }
6332 }
6333
6334 if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
6335 {
6336 /* If the CFG has been built, emit a CFG-based dump. */
6337 check_bb_profile (ENTRY_BLOCK_PTR, file);
6338 if (!ignore_topmost_bind)
6339 fprintf (file, "{\n");
6340
6341 if (any_var && n_basic_blocks)
6342 fprintf (file, "\n");
6343
6344 FOR_EACH_BB (bb)
6345 gimple_dump_bb (bb, file, 2, flags);
6346
6347 fprintf (file, "}\n");
6348 check_bb_profile (EXIT_BLOCK_PTR, file);
6349 }
6350 else if (DECL_SAVED_TREE (fn) == NULL)
6351 {
6352 /* The function is now in GIMPLE form but the CFG has not been
6353 built yet. Emit the single sequence of GIMPLE statements
6354 that make up its body. */
6355 gimple_seq body = gimple_body (fn);
6356
6357 if (gimple_seq_first_stmt (body)
6358 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
6359 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
6360 print_gimple_seq (file, body, 0, flags);
6361 else
6362 {
6363 if (!ignore_topmost_bind)
6364 fprintf (file, "{\n");
6365
6366 if (any_var)
6367 fprintf (file, "\n");
6368
6369 print_gimple_seq (file, body, 2, flags);
6370 fprintf (file, "}\n");
6371 }
6372 }
6373 else
6374 {
6375 int indent;
6376
6377 /* Make a tree based dump. */
6378 chain = DECL_SAVED_TREE (fn);
6379
6380 if (chain && TREE_CODE (chain) == BIND_EXPR)
6381 {
6382 if (ignore_topmost_bind)
6383 {
6384 chain = BIND_EXPR_BODY (chain);
6385 indent = 2;
6386 }
6387 else
6388 indent = 0;
6389 }
6390 else
6391 {
6392 if (!ignore_topmost_bind)
6393 fprintf (file, "{\n");
6394 indent = 2;
6395 }
6396
6397 if (any_var)
6398 fprintf (file, "\n");
6399
6400 print_generic_stmt_indented (file, chain, flags, indent);
6401 if (ignore_topmost_bind)
6402 fprintf (file, "}\n");
6403 }
6404
6405 fprintf (file, "\n\n");
6406
6407 /* Restore CFUN. */
6408 pop_cfun ();
6409 }
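
/* For example (illustrative), a pass that has a dump file enabled can
   emit the current function with:

     dump_function_to_file (current_function_decl, dump_file,
                            dump_flags | TDF_BLOCKS);

   TDF_BLOCKS additionally marks basic block boundaries in the dump
   (see tree-pass.h for the available TDF_* flags).  */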
6410
6411
6412 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
6413
6414 DEBUG_FUNCTION void
6415 debug_function (tree fn, int flags)
6416 {
6417 dump_function_to_file (fn, stderr, flags);
6418 }
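
/* Typical interactive use from a debugger session (illustrative):

     (gdb) call debug_function (current_function_decl, 0)

   which prints the current function to stderr.  */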
6419
6420
6421 /* Print on FILE the indexes for the predecessors of basic_block BB. */
6422
6423 static void
6424 print_pred_bbs (FILE *file, basic_block bb)
6425 {
6426 edge e;
6427 edge_iterator ei;
6428
6429 FOR_EACH_EDGE (e, ei, bb->preds)
6430 fprintf (file, "bb_%d ", e->src->index);
6431 }
6432
6433
6434 /* Print on FILE the indexes for the successors of basic_block BB. */
6435
6436 static void
6437 print_succ_bbs (FILE *file, basic_block bb)
6438 {
6439 edge e;
6440 edge_iterator ei;
6441
6442 FOR_EACH_EDGE (e, ei, bb->succs)
6443 fprintf (file, "bb_%d ", e->dest->index);
6444 }
6445
6446 /* Print to FILE the basic block BB according to the VERBOSITY level. */
6447
6448 void
6449 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
6450 {
6451 char *s_indent = (char *) alloca ((size_t) indent + 1);
6452 memset ((void *) s_indent, ' ', (size_t) indent);
6453 s_indent[indent] = '\0';
6454
6455 /* Print basic_block's header. */
6456 if (verbosity >= 2)
6457 {
6458 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
6459 print_pred_bbs (file, bb);
6460 fprintf (file, "}, succs = {");
6461 print_succ_bbs (file, bb);
6462 fprintf (file, "})\n");
6463 }
6464
6465 /* Print basic_block's body. */
6466 if (verbosity >= 3)
6467 {
6468 fprintf (file, "%s {\n", s_indent);
6469 gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
6470 fprintf (file, "%s }\n", s_indent);
6471 }
6472 }
6473
6474 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
6475
6476 /* Pretty print LOOP on FILE, indented INDENT spaces.  Depending on
6477 the VERBOSITY level, this outputs the contents of the loop or just
6478 its structure.  */
6479
6480 static void
6481 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
6482 {
6483 char *s_indent;
6484 basic_block bb;
6485
6486 if (loop == NULL)
6487 return;
6488
6489 s_indent = (char *) alloca ((size_t) indent + 1);
6490 memset ((void *) s_indent, ' ', (size_t) indent);
6491 s_indent[indent] = '\0';
6492
6493 /* Print loop's header. */
6494 fprintf (file, "%sloop_%d (header = %d, latch = %d", s_indent,
6495 loop->num, loop->header->index, loop->latch->index);
6496 fprintf (file, ", niter = ");
6497 print_generic_expr (file, loop->nb_iterations, 0);
6498
6499 if (loop->any_upper_bound)
6500 {
6501 fprintf (file, ", upper_bound = ");
6502 dump_double_int (file, loop->nb_iterations_upper_bound, true);
6503 }
6504
6505 if (loop->any_estimate)
6506 {
6507 fprintf (file, ", estimate = ");
6508 dump_double_int (file, loop->nb_iterations_estimate, true);
6509 }
6510 fprintf (file, ")\n");
6511
6512 /* Print loop's body. */
6513 if (verbosity >= 1)
6514 {
6515 fprintf (file, "%s{\n", s_indent);
6516 FOR_EACH_BB (bb)
6517 if (bb->loop_father == loop)
6518 print_loops_bb (file, bb, indent, verbosity);
6519
6520 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
6521 fprintf (file, "%s}\n", s_indent);
6522 }
6523 }
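
/* Sample output at VERBOSITY 0 (illustrative; actual numbers depend
   on the function being compiled):

     loop_1 (header = 3, latch = 5, niter = n_7, upper_bound = 127)

   At VERBOSITY >= 1 the loop body and any inner loops are printed
   between braces as well.  */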
6524
6525 /* Print the LOOP and its sibling loops on FILE, indented INDENT
6526 spaces.  Depending on the VERBOSITY level, this outputs the
6527 contents of the loop or just its structure. */
6528
6529 static void
6530 print_loop_and_siblings (FILE *file, struct loop *loop, int indent, int verbosity)
6531 {
6532 if (loop == NULL)
6533 return;
6534
6535 print_loop (file, loop, indent, verbosity);
6536 print_loop_and_siblings (file, loop->next, indent, verbosity);
6537 }
6538
6539 /* Starting from the entry point of the program, walk the CFG and
6540 pretty print on FILE the structure of each loop encountered.  */
6541
6542 void
6543 print_loops (FILE *file, int verbosity)
6544 {
6545 basic_block bb;
6546
6547 bb = ENTRY_BLOCK_PTR;
6548 if (bb && bb->loop_father)
6549 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
6550 }
6551
6552
6553 /* Debugging loops structure at tree level, at some VERBOSITY level. */
6554
6555 DEBUG_FUNCTION void
6556 debug_loops (int verbosity)
6557 {
6558 print_loops (stderr, verbosity);
6559 }
6560
6561 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
6562
6563 DEBUG_FUNCTION void
6564 debug_loop (struct loop *loop, int verbosity)
6565 {
6566 print_loop (stderr, loop, 0, verbosity);
6567 }
6568
6569 /* Print on stderr the code of loop number NUM, at some VERBOSITY
6570 level. */
6571
6572 DEBUG_FUNCTION void
6573 debug_loop_num (unsigned num, int verbosity)
6574 {
6575 debug_loop (get_loop (num), verbosity);
6576 }
6577
6578 /* Return true if BB ends with a call, possibly followed by some
6579 instructions that must stay with the call.  Return false
6580 otherwise.  */
6581
6582 static bool
6583 gimple_block_ends_with_call_p (basic_block bb)
6584 {
6585 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6586 return is_gimple_call (gsi_stmt (gsi));
6587 }
6588
6589
6590 /* Return true if BB ends with a conditional branch.  Return false
6591 otherwise.  */
6592
6593 static bool
6594 gimple_block_ends_with_condjump_p (const_basic_block bb)
6595 {
6596 gimple stmt = last_stmt (CONST_CAST_BB (bb));
6597 return (stmt && gimple_code (stmt) == GIMPLE_COND);
6598 }
6599
6600
6601 /* Return true if we need to add fake edge to exit at statement T.
6602 Helper function for gimple_flow_call_edges_add. */
6603
6604 static bool
6605 need_fake_edge_p (gimple t)
6606 {
6607 tree fndecl = NULL_TREE;
6608 int call_flags = 0;
6609
6610 /* NORETURN and LONGJMP calls already have an edge to exit.
6611 CONST and PURE calls do not need one.
6612 We don't currently check for CONST and PURE here, although
6613 it would be a good idea, because those attributes are
6614 figured out from the RTL in mark_constant_function, and
6615 the counter incrementation code from -fprofile-arcs
6616 leads to different results from -fbranch-probabilities. */
6617 if (is_gimple_call (t))
6618 {
6619 fndecl = gimple_call_fndecl (t);
6620 call_flags = gimple_call_flags (t);
6621 }
6622
6623 if (is_gimple_call (t)
6624 && fndecl
6625 && DECL_BUILT_IN (fndecl)
6626 && (call_flags & ECF_NOTHROW)
6627 && !(call_flags & ECF_RETURNS_TWICE)
6628 /* fork() doesn't really return twice, but the effect of
6629 wrapping it in __gcov_fork() which calls __gcov_flush()
6630 and clears the counters before forking has the same
6631 effect as returning twice. Force a fake edge. */
6632 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6633 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
6634 return false;
6635
6636 if (is_gimple_call (t)
6637 && !(call_flags & ECF_NORETURN))
6638 return true;
6639
6640 if (gimple_code (t) == GIMPLE_ASM
6641 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
6642 return true;
6643
6644 return false;
6645 }
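
/* Illustrative cases: a call to exit () is ECF_NORETURN, so it
   already has an edge to the exit block and needs no fake edge; a
   plain call to some foo () may or may not return, so it does need
   one; an "asm volatile" does as well.  */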
6646
6647
6648 /* Add fake edges to the function exit for any non-constant and
6649 non-noreturn calls and for volatile inline assembly, in the bitmap
6650 of blocks specified by BLOCKS or in the whole CFG if BLOCKS is
6651 zero.  Return the number of blocks that were split.
6652
6653 The goal is to expose cases in which entering a basic block does
6654 not imply that all subsequent instructions must be executed. */
6655
6656 static int
6657 gimple_flow_call_edges_add (sbitmap blocks)
6658 {
6659 int i;
6660 int blocks_split = 0;
6661 int last_bb = last_basic_block;
6662 bool check_last_block = false;
6663
6664 if (n_basic_blocks == NUM_FIXED_BLOCKS)
6665 return 0;
6666
6667 if (! blocks)
6668 check_last_block = true;
6669 else
6670 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
6671
6672 /* In the last basic block, before epilogue generation, there will be
6673 a fallthru edge to EXIT. Special care is required if the last insn
6674 of the last basic block is a call because make_edge folds duplicate
6675 edges, which would result in the fallthru edge also being marked
6676 fake, which would result in the fallthru edge being removed by
6677 remove_fake_edges, which would result in an invalid CFG.
6678
6679 Moreover, we can't elide the outgoing fake edge, since the block
6680 profiler needs to take this into account in order to solve the minimal
6681 spanning tree in the case that the call doesn't return.
6682
6683 Handle this by adding a dummy instruction in a new last basic block. */
6684 if (check_last_block)
6685 {
6686 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
6687 gimple_stmt_iterator gsi = gsi_last_bb (bb);
6688 gimple t = NULL;
6689
6690 if (!gsi_end_p (gsi))
6691 t = gsi_stmt (gsi);
6692
6693 if (t && need_fake_edge_p (t))
6694 {
6695 edge e;
6696
6697 e = find_edge (bb, EXIT_BLOCK_PTR);
6698 if (e)
6699 {
6700 gsi_insert_on_edge (e, gimple_build_nop ());
6701 gsi_commit_edge_inserts ();
6702 }
6703 }
6704 }
6705
6706 /* Now add fake edges to the function exit for any non-constant
6707 calls, since there is no way that we can determine if they will
6708 return or not... */
6709 for (i = 0; i < last_bb; i++)
6710 {
6711 basic_block bb = BASIC_BLOCK (i);
6712 gimple_stmt_iterator gsi;
6713 gimple stmt, last_stmt;
6714
6715 if (!bb)
6716 continue;
6717
6718 if (blocks && !TEST_BIT (blocks, i))
6719 continue;
6720
6721 gsi = gsi_last_bb (bb);
6722 if (!gsi_end_p (gsi))
6723 {
6724 last_stmt = gsi_stmt (gsi);
6725 do
6726 {
6727 stmt = gsi_stmt (gsi);
6728 if (need_fake_edge_p (stmt))
6729 {
6730 edge e;
6731
6732 /* The handling above of the final block before the
6733 epilogue should be enough to verify that there is
6734 no edge to the exit block in CFG already.
6735 Calling make_edge in such case would cause us to
6736 mark that edge as fake and remove it later. */
6737 #ifdef ENABLE_CHECKING
6738 if (stmt == last_stmt)
6739 {
6740 e = find_edge (bb, EXIT_BLOCK_PTR);
6741 gcc_assert (e == NULL);
6742 }
6743 #endif
6744
6745 /* Note that the following may create a new basic block
6746 and renumber the existing basic blocks. */
6747 if (stmt != last_stmt)
6748 {
6749 e = split_block (bb, stmt);
6750 if (e)
6751 blocks_split++;
6752 }
6753 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
6754 }
6755 gsi_prev (&gsi);
6756 }
6757 while (!gsi_end_p (gsi));
6758 }
6759 }
6760
6761 if (blocks_split)
6762 verify_flow_info ();
6763
6764 return blocks_split;
6765 }
6766
6767 /* Purge dead abnormal call edges from basic block BB. */
6768
6769 bool
6770 gimple_purge_dead_abnormal_call_edges (basic_block bb)
6771 {
6772 bool changed = gimple_purge_dead_eh_edges (bb);
6773
6774 if (cfun->has_nonlocal_label)
6775 {
6776 gimple stmt = last_stmt (bb);
6777 edge_iterator ei;
6778 edge e;
6779
6780 if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
6781 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6782 {
6783 if (e->flags & EDGE_ABNORMAL)
6784 {
6785 remove_edge (e);
6786 changed = true;
6787 }
6788 else
6789 ei_next (&ei);
6790 }
6791
6792 /* See gimple_purge_dead_eh_edges below. */
6793 if (changed)
6794 free_dominance_info (CDI_DOMINATORS);
6795 }
6796
6797 return changed;
6798 }
6799
6800 /* Removes edge E and all the blocks dominated by it, and updates dominance
6801 information. The IL in E->src needs to be updated separately.
6802 If dominance info is not available, only the edge E is removed.  */
6803
6804 void
6805 remove_edge_and_dominated_blocks (edge e)
6806 {
6807 VEC (basic_block, heap) *bbs_to_remove = NULL;
6808 VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
6809 bitmap df, df_idom;
6810 edge f;
6811 edge_iterator ei;
6812 bool none_removed = false;
6813 unsigned i;
6814 basic_block bb, dbb;
6815 bitmap_iterator bi;
6816
6817 if (!dom_info_available_p (CDI_DOMINATORS))
6818 {
6819 remove_edge (e);
6820 return;
6821 }
6822
6823 /* No updating is needed for edges to exit. */
6824 if (e->dest == EXIT_BLOCK_PTR)
6825 {
6826 if (cfgcleanup_altered_bbs)
6827 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6828 remove_edge (e);
6829 return;
6830 }
6831
6832 /* First, we find the basic blocks to remove. If E->dest has a predecessor
6833 that is not dominated by E->dest, then this set is empty. Otherwise,
6834 all the basic blocks dominated by E->dest are removed.
6835
6836 Also, to DF_IDOM we store the immediate dominators of the blocks in
6837 the dominance frontier of E (i.e., of the successors of the
6838 removed blocks, if there are any, and of E->dest otherwise). */
6839 FOR_EACH_EDGE (f, ei, e->dest->preds)
6840 {
6841 if (f == e)
6842 continue;
6843
6844 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
6845 {
6846 none_removed = true;
6847 break;
6848 }
6849 }
6850
6851 df = BITMAP_ALLOC (NULL);
6852 df_idom = BITMAP_ALLOC (NULL);
6853
6854 if (none_removed)
6855 bitmap_set_bit (df_idom,
6856 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
6857 else
6858 {
6859 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
6860 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6861 {
6862 FOR_EACH_EDGE (f, ei, bb->succs)
6863 {
6864 if (f->dest != EXIT_BLOCK_PTR)
6865 bitmap_set_bit (df, f->dest->index);
6866 }
6867 }
6868 for (i = 0; VEC_iterate (basic_block, bbs_to_remove, i, bb); i++)
6869 bitmap_clear_bit (df, bb->index);
6870
6871 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
6872 {
6873 bb = BASIC_BLOCK (i);
6874 bitmap_set_bit (df_idom,
6875 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
6876 }
6877 }
6878
6879 if (cfgcleanup_altered_bbs)
6880 {
6881 /* Record the set of the altered basic blocks. */
6882 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
6883 bitmap_ior_into (cfgcleanup_altered_bbs, df);
6884 }
6885
6886 /* Remove E and the cancelled blocks. */
6887 if (none_removed)
6888 remove_edge (e);
6889 else
6890 {
6891 /* Walk backwards so as to get a chance to substitute all
6892 released DEFs into debug stmts. See
6893 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
6894 details. */
6895 for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
6896 delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
6897 }
6898
6899 /* Update the dominance information. The immediate dominator may change only
6900 for blocks whose immediate dominator belongs to DF_IDOM:
6901
6902 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
6903 removal.  Let Z be an arbitrary block such that idom(Z) = Y and
6904 Z dominates X after the removal. Before removal, there exists a path P
6905 from Y to X that avoids Z. Let F be the last edge on P that is
6906 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
6907 dominates W, and because of P, Z does not dominate W), and W belongs to
6908 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
6909 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
6910 {
6911 bb = BASIC_BLOCK (i);
6912 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
6913 dbb;
6914 dbb = next_dom_son (CDI_DOMINATORS, dbb))
6915 VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
6916 }
6917
6918 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
6919
6920 BITMAP_FREE (df);
6921 BITMAP_FREE (df_idom);
6922 VEC_free (basic_block, heap, bbs_to_remove);
6923 VEC_free (basic_block, heap, bbs_to_fix_dom);
6924 }
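
/* Worked example (illustrative): in the CFG

     A -> B,  A -> C,  B -> C

   idom (B) = idom (C) = A.  Removing the edge A->B deletes B (the
   only block it dominates is itself); C ends up in the dominance
   frontier, so DF_IDOM = { A } and the fix-up walk above revalidates
   the dominator sons of A; idom (C) stays A.  */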
6925
6926 /* Purge dead EH edges from basic block BB. */
6927
6928 bool
6929 gimple_purge_dead_eh_edges (basic_block bb)
6930 {
6931 bool changed = false;
6932 edge e;
6933 edge_iterator ei;
6934 gimple stmt = last_stmt (bb);
6935
6936 if (stmt && stmt_can_throw_internal (stmt))
6937 return false;
6938
6939 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6940 {
6941 if (e->flags & EDGE_EH)
6942 {
6943 remove_edge_and_dominated_blocks (e);
6944 changed = true;
6945 }
6946 else
6947 ei_next (&ei);
6948 }
6949
6950 return changed;
6951 }
6952
6953 bool
6954 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
6955 {
6956 bool changed = false;
6957 unsigned i;
6958 bitmap_iterator bi;
6959
6960 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
6961 {
6962 basic_block bb = BASIC_BLOCK (i);
6963
6964 /* Earlier gimple_purge_dead_eh_edges could have removed
6965 this basic block already. */
6966 gcc_assert (bb || changed);
6967 if (bb != NULL)
6968 changed |= gimple_purge_dead_eh_edges (bb);
6969 }
6970
6971 return changed;
6972 }
6973
6974 /* This function is called whenever a new edge is created or
6975 redirected. */
6976
6977 static void
6978 gimple_execute_on_growing_pred (edge e)
6979 {
6980 basic_block bb = e->dest;
6981
6982 if (!gimple_seq_empty_p (phi_nodes (bb)))
6983 reserve_phi_args_for_new_edge (bb);
6984 }
6985
6986 /* This function is called immediately before edge E is removed from
6987 the edge vector E->dest->preds. */
6988
6989 static void
6990 gimple_execute_on_shrinking_pred (edge e)
6991 {
6992 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
6993 remove_phi_args (e);
6994 }
6995
6996 /*---------------------------------------------------------------------------
6997 Helper functions for Loop versioning
6998 ---------------------------------------------------------------------------*/
6999
7000 /* Adjust phi nodes for 'first' basic block.  'second' is a copy of
7001 'first'.  Both are dominated by 'new_head'.  When 'new_head' was
7002 created by splitting 'second's incoming edge, it received phi
7003 arguments on that edge from split_edge().  Later, an additional
7004 edge 'e' was created to connect 'new_head' and 'first'.  This
7005 routine now adds to 'e' the phi args that the edge from 'new_head'
7006 to 'second' received as part of the edge splitting.  */
7007
7008 static void
7009 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7010 basic_block new_head, edge e)
7011 {
7012 gimple phi1, phi2;
7013 gimple_stmt_iterator psi1, psi2;
7014 tree def;
7015 edge e2 = find_edge (new_head, second);
7016
7017 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7018 edge, we should always have an edge from NEW_HEAD to SECOND. */
7019 gcc_assert (e2 != NULL);
7020
7021 /* Browse all 'second' basic block phi nodes and add phi args to
7022 edge 'e' for 'first' head. PHI args are always in correct order. */
7023
7024 for (psi2 = gsi_start_phis (second),
7025 psi1 = gsi_start_phis (first);
7026 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7027 gsi_next (&psi2), gsi_next (&psi1))
7028 {
7029 phi1 = gsi_stmt (psi1);
7030 phi2 = gsi_stmt (psi2);
7031 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7032 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7033 }
7034 }
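
/* Schematically (illustrative):

          new_head
           /    \
         e/      \e2
      first      second

   For each phi pair (PHI1 in FIRST, PHI2 in SECOND), the argument
   that PHI2 receives on E2 is copied to PHI1 as its argument on the
   new edge E.  */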
7035
7036
7037 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7038 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7039 the destination of the ELSE part.  */
7040
7041 static void
7042 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7043 basic_block second_head ATTRIBUTE_UNUSED,
7044 basic_block cond_bb, void *cond_e)
7045 {
7046 gimple_stmt_iterator gsi;
7047 gimple new_cond_expr;
7048 tree cond_expr = (tree) cond_e;
7049 edge e0;
7050
7051 /* Build the new conditional expression.  */
7052 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7053 NULL_TREE, NULL_TREE);
7054
7055 /* Add new cond in cond_bb. */
7056 gsi = gsi_last_bb (cond_bb);
7057 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7058
7059 /* Adjust edges appropriately to connect new head with first head
7060 as well as second head. */
7061 e0 = single_succ_edge (cond_bb);
7062 e0->flags &= ~EDGE_FALLTHRU;
7063 e0->flags |= EDGE_FALSE_VALUE;
7064 }
7065
7066 struct cfg_hooks gimple_cfg_hooks = {
7067 "gimple",
7068 gimple_verify_flow_info,
7069 gimple_dump_bb, /* dump_bb */
7070 create_bb, /* create_basic_block */
7071 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7072 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7073 gimple_can_remove_branch_p, /* can_remove_branch_p */
7074 remove_bb, /* delete_basic_block */
7075 gimple_split_block, /* split_block */
7076 gimple_move_block_after, /* move_block_after */
7077 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7078 gimple_merge_blocks, /* merge_blocks */
7079 gimple_predict_edge, /* predict_edge */
7080 gimple_predicted_by_p, /* predicted_by_p */
7081 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7082 gimple_duplicate_bb, /* duplicate_block */
7083 gimple_split_edge, /* split_edge */
7084 gimple_make_forwarder_block, /* make_forward_block */
7085 NULL, /* tidy_fallthru_edge */
7086 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7087 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7088 gimple_flow_call_edges_add, /* flow_call_edges_add */
7089 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7090 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7091 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7092 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7093 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
7094 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7095 flush_pending_stmts /* flush_pending_stmts */
7096 };
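
/* The generic CFG routines dispatch through a table of this type; for
   example (sketch, see cfghooks.c), a call such as

     split_edge (e);

   reaches gimple_split_edge via cfg_hooks->split_edge.  This table is
   installed while the IL is in GIMPLE form (see
   gimple_register_cfg_hooks in cfghooks.c).  */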
7097
7098
7099 /* Split all critical edges. */
7100
7101 static unsigned int
7102 split_critical_edges (void)
7103 {
7104 basic_block bb;
7105 edge e;
7106 edge_iterator ei;
7107
7108 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7109 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7110 mappings around the calls to split_edge. */
7111 start_recording_case_labels ();
7112 FOR_ALL_BB (bb)
7113 {
7114 FOR_EACH_EDGE (e, ei, bb->succs)
7115 {
7116 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7117 split_edge (e);
7118 /* PRE inserts statements onto edges and expects that,
7119 since split_critical_edges was run beforehand, committing edge
7120 insertions will not split more edges.  In addition to critical
7121 edges we must also split edges whose source ends with a control
7122 flow statement, such as RESX, when their destination has multiple
7123 predecessors or PHIs.  This matches the logic in
7124 gimple_find_edge_insert_loc. */
7125 else if ((!single_pred_p (e->dest)
7126 || !gimple_seq_empty_p (phi_nodes (e->dest))
7127 || e->dest == EXIT_BLOCK_PTR)
7128 && e->src != ENTRY_BLOCK_PTR
7129 && !(e->flags & EDGE_ABNORMAL))
7130 {
7131 gimple_stmt_iterator gsi;
7132
7133 gsi = gsi_last_bb (e->src);
7134 if (!gsi_end_p (gsi)
7135 && stmt_ends_bb_p (gsi_stmt (gsi))
7136 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7137 && !gimple_call_builtin_p (gsi_stmt (gsi),
7138 BUILT_IN_RETURN)))
7139 split_edge (e);
7140 }
7141 }
7142 }
7143 end_recording_case_labels ();
7144 return 0;
7145 }
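
/* Reminder (illustrative): EDGE_CRITICAL_P holds for an edge whose
   source has two or more successors and whose destination has two or
   more predecessors, e.g. the edge A->C in

     A -> B,  A -> C,  D -> C

   Statements cannot be inserted on such an edge without first
   splitting it with a new empty block.  */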
7146
7147 struct gimple_opt_pass pass_split_crit_edges =
7148 {
7149 {
7150 GIMPLE_PASS,
7151 "crited", /* name */
7152 NULL, /* gate */
7153 split_critical_edges, /* execute */
7154 NULL, /* sub */
7155 NULL, /* next */
7156 0, /* static_pass_number */
7157 TV_TREE_SPLIT_EDGES, /* tv_id */
7158 PROP_cfg, /* properties required */
7159 PROP_no_crit_edges, /* properties_provided */
7160 0, /* properties_destroyed */
7161 0, /* todo_flags_start */
7162 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
7163 }
7164 };
7165
7166
7167 /* Build a ternary operation and gimplify it. Emit code before GSI.
7168 Return the gimple_val holding the result. */
7169
7170 tree
7171 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7172 tree type, tree a, tree b, tree c)
7173 {
7174 tree ret;
7175 location_t loc = gimple_location (gsi_stmt (*gsi));
7176
7177 ret = fold_build3_loc (loc, code, type, a, b, c);
7178 STRIP_NOPS (ret);
7179
7180 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7181 GSI_SAME_STMT);
7182 }
7183
7184 /* Build a binary operation and gimplify it. Emit code before GSI.
7185 Return the gimple_val holding the result. */
7186
7187 tree
7188 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7189 tree type, tree a, tree b)
7190 {
7191 tree ret;
7192
7193 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7194 STRIP_NOPS (ret);
7195
7196 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7197 GSI_SAME_STMT);
7198 }
7199
7200 /* Build a unary operation and gimplify it. Emit code before GSI.
7201 Return the gimple_val holding the result. */
7202
7203 tree
7204 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7205 tree a)
7206 {
7207 tree ret;
7208
7209 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7210 STRIP_NOPS (ret);
7211
7212 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7213 GSI_SAME_STMT);
7214 }
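
/* Usage sketch (illustrative): to emit "(a + b) * -c" in front of the
   statement at *GSI:

     tree sum  = gimplify_build2 (gsi, PLUS_EXPR, type, a, b);
     tree negc = gimplify_build1 (gsi, NEGATE_EXPR, type, c);
     tree prod = gimplify_build2 (gsi, MULT_EXPR, type, sum, negc);

   Each call gimplifies the folded expression before *GSI and returns
   a gimple value (an SSA name or an invariant) holding the result.  */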
7215
7216
7217 \f
7218 /* Emit return warnings. */
7219
7220 static unsigned int
7221 execute_warn_function_return (void)
7222 {
7223 source_location location;
7224 gimple last;
7225 edge e;
7226 edge_iterator ei;
7227
7228 /* If we have a path to EXIT, then we do return. */
7229 if (TREE_THIS_VOLATILE (cfun->decl)
7230 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
7231 {
7232 location = UNKNOWN_LOCATION;
7233 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7234 {
7235 last = last_stmt (e->src);
7236 if ((gimple_code (last) == GIMPLE_RETURN
7237 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
7238 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
7239 break;
7240 }
7241 if (location == UNKNOWN_LOCATION)
7242 location = cfun->function_end_locus;
7243 warning_at (location, 0, "%<noreturn%> function does return");
7244 }
7245
7246 /* If we see "return;" in some basic block, then we do reach the end
7247 without returning a value. */
7248 else if (warn_return_type
7249 && !TREE_NO_WARNING (cfun->decl)
7250 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
7251 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
7252 {
7253 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
7254 {
7255 gimple last = last_stmt (e->src);
7256 if (gimple_code (last) == GIMPLE_RETURN
7257 && gimple_return_retval (last) == NULL
7258 && !gimple_no_warning_p (last))
7259 {
7260 location = gimple_location (last);
7261 if (location == UNKNOWN_LOCATION)
7262 location = cfun->function_end_locus;
7263 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
7264 TREE_NO_WARNING (cfun->decl) = 1;
7265 break;
7266 }
7267 }
7268 }
7269 return 0;
7270 }
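
/* For instance (illustrative), the two diagnostics above fire on:

     __attribute__ ((noreturn)) void f (void) { return; }
         -> "'noreturn' function does return"

     int g (int x) { if (x) return 1; }
         -> "control reaches end of non-void function"
            (with -Wreturn-type)  */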
7271
7272
7273 /* Given a basic block B which ends with a conditional and has
7274 precisely two successors, determine which of the edges is taken if
7275 the conditional is true and which is taken if the conditional is
7276 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
7277
7278 void
7279 extract_true_false_edges_from_block (basic_block b,
7280 edge *true_edge,
7281 edge *false_edge)
7282 {
7283 edge e = EDGE_SUCC (b, 0);
7284
7285 if (e->flags & EDGE_TRUE_VALUE)
7286 {
7287 *true_edge = e;
7288 *false_edge = EDGE_SUCC (b, 1);
7289 }
7290 else
7291 {
7292 *false_edge = e;
7293 *true_edge = EDGE_SUCC (b, 1);
7294 }
7295 }
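
/* Usage sketch (illustrative), for a block B that ends in a
   GIMPLE_COND:

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   TRUE_EDGE is then the successor taken when the condition holds
   (its flags contain EDGE_TRUE_VALUE) and FALSE_EDGE the other.  */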
7296
7297 struct gimple_opt_pass pass_warn_function_return =
7298 {
7299 {
7300 GIMPLE_PASS,
7301 "*warn_function_return", /* name */
7302 NULL, /* gate */
7303 execute_warn_function_return, /* execute */
7304 NULL, /* sub */
7305 NULL, /* next */
7306 0, /* static_pass_number */
7307 TV_NONE, /* tv_id */
7308 PROP_cfg, /* properties_required */
7309 0, /* properties_provided */
7310 0, /* properties_destroyed */
7311 0, /* todo_flags_start */
7312 0 /* todo_flags_finish */
7313 }
7314 };
7315
7316 /* Emit noreturn warnings. */
7317
7318 static unsigned int
7319 execute_warn_function_noreturn (void)
7320 {
7321 if (!TREE_THIS_VOLATILE (current_function_decl)
7322 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
7323 warn_function_noreturn (current_function_decl);
7324 return 0;
7325 }
7326
7327 static bool
7328 gate_warn_function_noreturn (void)
7329 {
7330 return warn_suggest_attribute_noreturn;
7331 }
7332
7333 struct gimple_opt_pass pass_warn_function_noreturn =
7334 {
7335 {
7336 GIMPLE_PASS,
7337 "*warn_function_noreturn", /* name */
7338 gate_warn_function_noreturn, /* gate */
7339 execute_warn_function_noreturn, /* execute */
7340 NULL, /* sub */
7341 NULL, /* next */
7342 0, /* static_pass_number */
7343 TV_NONE, /* tv_id */
7344 PROP_cfg, /* properties_required */
7345 0, /* properties_provided */
7346 0, /* properties_destroyed */
7347 0, /* todo_flags_start */
7348 0 /* todo_flags_finish */
7349 }
7350 };
7351
7352
7353 /* Walk a gimplified function and warn for functions whose return value is
7354 ignored and attribute((warn_unused_result)) is set. This is done before
7355 inlining, so we don't have to worry about that. */
7356
7357 static void
7358 do_warn_unused_result (gimple_seq seq)
7359 {
7360 tree fdecl, ftype;
7361 gimple_stmt_iterator i;
7362
7363 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
7364 {
7365 gimple g = gsi_stmt (i);
7366
7367 switch (gimple_code (g))
7368 {
7369 case GIMPLE_BIND:
7370 do_warn_unused_result (gimple_bind_body (g));
7371 break;
7372 case GIMPLE_TRY:
7373 do_warn_unused_result (gimple_try_eval (g));
7374 do_warn_unused_result (gimple_try_cleanup (g));
7375 break;
7376 case GIMPLE_CATCH:
7377 do_warn_unused_result (gimple_catch_handler (g));
7378 break;
7379 case GIMPLE_EH_FILTER:
7380 do_warn_unused_result (gimple_eh_filter_failure (g));
7381 break;
7382
7383 case GIMPLE_CALL:
7384 if (gimple_call_lhs (g))
7385 break;
7386
7387 /* This is a naked call, as opposed to a GIMPLE_CALL with an
7388 LHS. All calls whose value is ignored should be
7389 represented like this. Look for the attribute. */
7390 fdecl = gimple_call_fndecl (g);
7391 ftype = TREE_TYPE (TREE_TYPE (gimple_call_fn (g)));
7392
7393 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
7394 {
7395 location_t loc = gimple_location (g);
7396
7397 if (fdecl)
7398 warning_at (loc, OPT_Wunused_result,
7399 "ignoring return value of %qD, "
7400 "declared with attribute warn_unused_result",
7401 fdecl);
7402 else
7403 warning_at (loc, OPT_Wunused_result,
7404 "ignoring return value of function "
7405 "declared with attribute warn_unused_result");
7406 }
7407 break;
7408
7409 default:
7410 /* Not a container, not a call, or a call whose value is used. */
7411 break;
7412 }
7413 }
7414 }
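
/* For example (illustrative), given

     __attribute__ ((warn_unused_result)) int f (void);
     void g (void) { f (); }

   the call to f in g is a GIMPLE_CALL with no LHS, so -Wunused-result
   reports that its return value is ignored.  */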
7415
7416 static unsigned int
7417 run_warn_unused_result (void)
7418 {
7419 do_warn_unused_result (gimple_body (current_function_decl));
7420 return 0;
7421 }
7422
7423 static bool
7424 gate_warn_unused_result (void)
7425 {
7426 return flag_warn_unused_result;
7427 }
7428
7429 struct gimple_opt_pass pass_warn_unused_result =
7430 {
7431 {
7432 GIMPLE_PASS,
7433 "*warn_unused_result", /* name */
7434 gate_warn_unused_result, /* gate */
7435 run_warn_unused_result, /* execute */
7436 NULL, /* sub */
7437 NULL, /* next */
7438 0, /* static_pass_number */
7439 TV_NONE, /* tv_id */
7440 PROP_gimple_any, /* properties_required */
7441 0, /* properties_provided */
7442 0, /* properties_destroyed */
7443 0, /* todo_flags_start */
7444 0, /* todo_flags_finish */
7445 }
7446 };
7447