/* Control flow functions for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "gimple-pretty-print.h"
#include "tree-ssa.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "pointer-set.h"
#include "tree-inline.h"
#include "target.h"
#include "tree-ssa-live.h"

/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static struct pointer_map_t *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Nonzero if we found a computed goto while building basic blocks.  */
static bool found_computed_goto;

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
{
  typedef locus_discrim_map value_type;
  typedef locus_discrim_map compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const value_type *item)
{
  return LOCATION_LINE (item->locus);
}

/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}

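/* Map from source line to the last discriminator assigned for that line.
   Created before make_edges and disposed once discriminators have been
   assigned (see build_gimple_cfg).  */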
static hash_table <locus_discrim_hasher> discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);
static void factor_computed_gotos (void);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (basic_block);
static void make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
static unsigned int split_critical_edges (void);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple, gimple);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gimple);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (basic_block, tree);
static tree find_case_label_for_value (gimple, tree);

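/* Set up an empty CFG for function FN: allocate the basic-block and
   label-to-block maps and link the ENTRY and EXIT blocks to each other.  */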
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_function (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_function (fn),
                         initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_function (fn),
                         initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
                                ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
  SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
                                EXIT_BLOCK_PTR_FOR_FUNCTION (fn));

  ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
  EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
}

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}

/*---------------------------------------------------------------------------
                              Create basic blocks
---------------------------------------------------------------------------*/

/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  found_computed_goto = 0;
  make_blocks (seq);

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.  */
  if (found_computed_goto)
    factor_computed_gotos ();

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR);

  /* Adjust the size of the array.  */
  if (basic_block_info->length () < (size_t) n_basic_blocks)
    vec_safe_grow_cleared (basic_block_info, n_basic_blocks);

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus.create (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  discriminator_per_locus.dispose ();
}

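/* Execute the "cfg" pass: build the CFG for the current function from its
   GIMPLE body, release the body (the statements now live in the CFG), and
   initialize the loop structures for the passes that follow.  */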
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  return 0;
}

namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_stmts, /* todo_flags_finish */
};

class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}

/* Return true if T is a computed goto: a GIMPLE_GOTO whose destination
   is not a LABEL_DECL (e.g. the GNU C extension "goto *ptr;").  */

static bool
computed_goto_p (gimple t)
{
  return (gimple_code (t) == GIMPLE_GOTO
          && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}


/* Search the CFG for any computed gotos.  If found, factor them to a
   common computed goto site.  Also record the location of that site so
   that we can un-factor the gotos after we have converted back to
   normal form.  */

static void
factor_computed_gotos (void)
{
  basic_block bb;
  tree factored_label_decl = NULL;
  tree var = NULL;
  gimple factored_computed_goto_label = NULL;
  gimple factored_computed_goto = NULL;

  /* We know there are one or more computed gotos in this function.
     Examine the last statement in each basic block to see if the block
     ends with a computed goto.  */

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (bb);
      gimple last;

      if (gsi_end_p (gsi))
        continue;

      last = gsi_stmt (gsi);

      /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
      if (last == factored_computed_goto)
        continue;

      /* If the last statement is a computed goto, factor it.  */
      if (computed_goto_p (last))
        {
          gimple assignment;

          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
          if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
              gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);

              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
                 variable and jump to the label we create immediately
                 below.  */
              var = create_tmp_var (ptr_type_node, "gotovar");

              /* Build a label for the new block which will contain the
                 factored computed goto.  */
              factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
              factored_computed_goto_label
                = gimple_build_label (factored_label_decl);
              gsi_insert_after (&new_gsi, factored_computed_goto_label,
                                GSI_NEW_STMT);

              /* Build our new computed goto.  */
              factored_computed_goto = gimple_build_goto (var);
              gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }

          /* Copy the original computed goto's destination into VAR.  */
          assignment = gimple_build_assign (var, gimple_goto_dest (last));
          gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

          /* And re-vector the computed goto to the new destination.  */
          gimple_goto_set_dest (last, factored_label_decl);
        }
    }
}


/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;
  basic_block bb = ENTRY_BLOCK_PTR;

  while (!gsi_end_p (i))
    {
      gimple prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
          if (!first_stmt_of_seq)
            gsi_split_seq_before (&i, &seq);
          bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }

      /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
      gimple_set_bb (stmt, bb);

      if (computed_goto_p (stmt))
        found_computed_goto = true;

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
         next iteration.  */
      if (stmt_ends_bb_p (stmt))
        {
          /* If the stmt can make an abnormal goto, use a new temporary
             for the assignment to the LHS.  This makes sure the old value
             of the LHS is available on the abnormal edge.  Otherwise
             we will end up with overlapping life-ranges for abnormal
             SSA names.  */
          if (gimple_has_lhs (stmt)
              && stmt_can_make_abnormal_goto (stmt)
              && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
            {
              tree lhs = gimple_get_lhs (stmt);
              tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
              gimple s = gimple_build_assign (lhs, tmp);
              gimple_set_location (s, gimple_location (stmt));
              gimple_set_block (s, gimple_block (stmt));
              gimple_set_lhs (stmt, tmp);
              if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
                  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
                DECL_GIMPLE_REG_P (tmp) = 1;
              gsi_insert_after (&i, s, GSI_SAME_STMT);
            }
          start_new_block = true;
        }

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
}


/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block;
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block == basic_block_info->length ())
    {
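      /* Grow by a quarter (rounded up) so that repeated block creation
         costs amortized constant time per block.  */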
      size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
      vec_safe_grow_cleared (basic_block_info, new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK (last_basic_block, bb);

  n_basic_blocks++;
  last_basic_block++;

  return bb;
}


/*---------------------------------------------------------------------------
                                 Edge creation
---------------------------------------------------------------------------*/

/* Fold COND_EXPR_COND of each COND_EXPR.  */

void
fold_cond_expr_cond (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);

      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          location_t loc = gimple_location (stmt);
          tree cond;
          bool zerop, onep;
          fold_defer_overflow_warnings ();
          cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
                                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
          if (cond)
            {
              zerop = integer_zerop (cond);
              onep = integer_onep (cond);
            }
          else
            zerop = onep = false;

          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
            gimple_cond_make_false (stmt);
          else if (onep)
            gimple_cond_make_true (stmt);
        }
    }
}

/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB (bb)
    {
      gimple last = last_stmt (bb);
      bool fallthru;

      if (last)
        {
          enum gimple_code code = gimple_code (last);
          switch (code)
            {
            case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
            case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_SWITCH:
              make_gimple_switch_edges (bb);
              fallthru = false;
              break;
            case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
            case GIMPLE_EH_DISPATCH:
              fallthru = make_eh_dispatch_edges (last);
              break;

            case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
              if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);

              /* If this statement has reachable exception handlers, then
                 create abnormal edges to them.  */
              make_eh_edges (last);

              /* BUILT_IN_RETURN is really a return statement.  */
              if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
                make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
              /* Some calls are known not to return.  */
              else
                fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;

            case GIMPLE_ASSIGN:
              /* A GIMPLE_ASSIGN may throw internally and thus be considered
                 control-altering.  */
              if (is_ctrl_altering_stmt (last))
                make_eh_edges (last);
              fallthru = true;
              break;

            case GIMPLE_ASM:
              make_gimple_asm_edges (bb);
              fallthru = true;
              break;

            case GIMPLE_OMP_PARALLEL:
            case GIMPLE_OMP_TASK:
            case GIMPLE_OMP_FOR:
            case GIMPLE_OMP_SINGLE:
            case GIMPLE_OMP_MASTER:
            case GIMPLE_OMP_ORDERED:
            case GIMPLE_OMP_CRITICAL:
            case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;

            case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;

            case GIMPLE_OMP_ATOMIC_LOAD:
            case GIMPLE_OMP_ATOMIC_STORE:
              fallthru = true;
              break;

            case GIMPLE_OMP_RETURN:
              /* In the case of a GIMPLE_OMP_SECTION, the edge will go
                 somewhere other than the next block.  This will be
                 created later.  */
              cur_region->exit = bb;
              fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;

            case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
                case GIMPLE_OMP_FOR:
                  /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
                     successor edges as abnormal to prevent splitting
                     them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);

                  /* Create an edge from GIMPLE_OMP_FOR to exit, which
                     corresponds to the case that the body of the loop
                     is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;

                case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);

                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
                        gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }

                    /* Make the loopback edge to the block with
                       GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);

                    /* Make the edge from the switch to exit.  */
                    make_edge (switch_bb, bb->next_bb, 0);
                    fallthru = false;
                  }
                  break;

                default:
                  gcc_unreachable ();
                }
              break;

            case GIMPLE_TRANSACTION:
              {
                tree abort_label = gimple_transaction_label (last);
                if (abort_label)
                  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
                fallthru = true;
              }
              break;

            default:
              gcc_assert (!stmt_ends_bb_p (last));
              fallthru = true;
            }
        }
      else
        fallthru = true;

      if (fallthru)
        make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
    }

  if (root_omp_region)
    free_omp_regions ();

  /* Fold COND_EXPR_COND of each COND_EXPR.  */
  fold_cond_expr_cond ();
}

/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus.find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
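  /* Pre-increment, so the first discriminator handed out for a line is 1;
     0 is treated as "no discriminator assigned" (see
     assign_discriminators).  */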
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}

/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
          && to.file != NULL
          && filename_cmp (from.file, to.file) == 0);
}

/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      gimple last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
        continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          gimple first = first_non_label_stmt (e->dest);
          gimple last = last_stmt (e->dest);
          if ((first && same_line_p (locus, gimple_location (first)))
              || (last && same_line_p (locus, gimple_location (last))))
            {
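              /* BB and its successor share a source line; give one of
                 them a fresh discriminator so that sample-based profiles
                 can tell the two blocks apart.  */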
              if (e->dest->discriminator != 0 && bb->discriminator == 0)
                bb->discriminator = next_discriminator_for_locus (locus);
              else
                e->dest->discriminator = next_discriminator_for_locus (locus);
            }
        }
    }
}

/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  gimple then_stmt, else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}


/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the TREE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

static bool
edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
                       void *data ATTRIBUTE_UNUSED)
{
  tree t, next;

  for (t = (tree) *value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  *value = NULL;
  return true;
}

/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = pointer_map_create ();
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}

/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}

/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
  pointer_map_destroy (edge_to_cases);
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK (i);
      if (bb)
        {
          gimple stmt = last_stmt (bb);
          if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
            group_case_labels_stmt (stmt);
        }
    }
  BITMAP_FREE (touched_switch_bbs);
}

/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gimple t)
{
  void **slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = pointer_map_contains (edge_to_cases, e);
  if (slot)
    return (tree) *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
         a new chain.  */
      slot = pointer_map_insert (edge_to_cases, this_edge);
      CASE_CHAIN (elt) = (tree) *slot;
      *slot = elt;
    }

  return (tree) *pointer_map_contains (edge_to_cases, e);
}

/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (basic_block bb)
{
  gimple entry = last_stmt (bb);
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}


/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced with an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings come out right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
      gimple stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}

/* Create edges for an abnormal goto statement at block BB.  If FOR_CALL
   is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR.  */

void
make_abnormal_goto_edges (basic_block bb, bool for_call)
{
  basic_block target_bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (target_bb)
    {
      for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple label_stmt = gsi_stmt (gsi);
          tree target;

          if (gimple_code (label_stmt) != GIMPLE_LABEL)
            break;

          target = gimple_label_label (label_stmt);

          /* Make an edge to every label block that has been marked as a
             potential target for a computed goto or a non-local goto.  */
          if ((FORCED_LABEL (target) && !for_call)
              || (DECL_NONLOCAL (target) && for_call))
            {
              make_edge (bb, target_bb, EDGE_ABNORMAL);
              break;
            }
        }
      if (!gsi_end_p (gsi)
          && is_gimple_debug (gsi_stmt (gsi)))
        gsi_next_nondebug (&gsi);
      if (!gsi_end_p (gsi))
        {
          /* Make an edge to every setjmp-like call.  */
          gimple call_stmt = gsi_stmt (gsi);
          if (is_gimple_call (call_stmt)
              && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
            make_edge (bb, target_bb, EDGE_ABNORMAL);
        }
    }
}

/* Create edges for a goto statement at block BB.  */

static void
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return;
    }

  /* A computed GOTO creates abnormal edges.  */
  make_abnormal_goto_edges (bb, false);
}

/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gimple stmt = last_stmt (bb);
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}

/*---------------------------------------------------------------------------
                               Flowgraph analysis
---------------------------------------------------------------------------*/

/* Clean up useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after the CFG is created, to get rid of the labels
   that are no longer referenced.  After that we do not run it any more,
   since (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;

/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block may have inserted an undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}

/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
        lab = main_block_label (lp->post_landing_pad);
        if (lab != lp->post_landing_pad)
          {
            EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
            EH_LANDING_PAD_NR (lab) = lp->index;
          }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
        break;

      case ERT_TRY:
        {
          eh_catch c;
          for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
            {
              lab = c->label;
              if (lab)
                c->label = main_block_label (lab);
            }
        }
        break;

      case ERT_ALLOWED_EXCEPTIONS:
        lab = r->u.allowed.label;
        if (lab)
          r->u.allowed.label = main_block_label (lab);
        break;
      }
}


/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
          if (!label_for_bb[bb->index].label)
            {
              label_for_bb[bb->index].label = label;
              continue;
            }

          /* If we did see a label for the current block already, but it
             is an artificially created label, replace it if the current
             label is a user defined label.  */
          if (!DECL_ARTIFICIAL (label)
              && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
            {
              label_for_bb[bb->index].label = label;
              break;
            }
        }
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
        continue;

      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          label = gimple_cond_true_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_true_label (stmt, new_label);
            }

          label = gimple_cond_false_label (stmt);
          if (label)
            {
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_cond_set_false_label (stmt, new_label);
            }
          break;

        case GIMPLE_SWITCH:
          {
            size_t i, n = gimple_switch_num_labels (stmt);

            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
                tree case_label = gimple_switch_label (stmt, i);
                label = CASE_LABEL (case_label);
                new_label = main_block_label (label);
                if (new_label != label)
                  CASE_LABEL (case_label) = new_label;
              }
            break;
          }

        case GIMPLE_ASM:
          {
            int i, n = gimple_asm_nlabels (stmt);

            for (i = 0; i < n; ++i)
              {
                tree cons = gimple_asm_label_op (stmt, i);
                tree label = main_block_label (TREE_VALUE (cons));
                TREE_VALUE (cons) = label;
              }
            break;
          }

        /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
        case GIMPLE_GOTO:
          if (!computed_goto_p (stmt))
            {
              label = gimple_goto_dest (stmt);
              new_label = main_block_label (label);
              if (new_label != label)
                gimple_goto_set_dest (stmt, new_label);
            }
          break;

        case GIMPLE_TRANSACTION:
          {
            tree label = gimple_transaction_label (stmt);
            if (label)
              {
                tree new_label = main_block_label (label);
                if (new_label != label)
                  gimple_transaction_set_label (stmt, new_label);
              }
          }
          break;

        default:
          break;
        }
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
        continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          tree label;
          gimple stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_LABEL)
            break;

          label = gimple_label_label (stmt);

          if (label == label_for_this_bb
              || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
            gsi_next (&i);
          else
            gsi_remove (&i, true);
        }
    }

  free (label_for_bb);
}

/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gimple stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
         default case.  */
      if (base_bb == default_bb)
        {
          gimple_switch_set_label (stmt, i, NULL_TREE);
          i++;
          new_size--;
          continue;
        }

      base_high = CASE_HIGH (base_case)
          ? CASE_HIGH (base_case)
          : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
         of the label vector or when we cannot merge the next case
         label with the current one.  */
      while (i < old_size)
        {
          tree merge_case = gimple_switch_label (stmt, i);
          basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
          double_int bhp1 = tree_to_double_int (base_high) + double_int_one;

          /* Merge the cases if they jump to the same place,
             and their ranges are consecutive.  */
          if (merge_bb == base_bb
              && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
            {
              base_high = CASE_HIGH (merge_case) ?
                  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
              CASE_HIGH (base_case) = base_high;
              gimple_switch_set_label (stmt, i, NULL_TREE);
              new_size--;
              i++;
            }
          else
            break;
        }
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
        j++;
      gimple_switch_set_label (stmt, i,
                               gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}

/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        group_case_labels_stmt (stmt);
    }
}

/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

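  /* A must end in a simple fallthru edge to B, which must be its only
     predecessor; otherwise merging would change control flow.  */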
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
      lab = gimple_label_label (stmt);

      /* Do not remove user-forced labels, nor (at -O0) any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
        return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
        return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
        gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
           || gimple_location (gsi_stmt (prev)) != goto_locus)
          && (gsi_end_p (next)
              || gimple_location (gsi_stmt (next)) != goto_locus))
        return false;
    }

  return true;
}

/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
          replace_exp (use, val);

          if (gimple_code (stmt) == GIMPLE_PHI)
            {
              e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
                     would prevent replacement.  */
                  gcc_checking_assert (virtual_operand_p (name));
                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
                }
            }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
        {
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          gimple orig_stmt = stmt;
          size_t i;

          /* Mark the block if we changed the last stmt in it.  */
          if (cfgcleanup_altered_bbs
              && stmt_ends_bb_p (stmt))
            bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

          /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
             on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
             only change something from non-invariant to invariant, and only
             when propagating constants.  */
          if (is_gimple_min_invariant (val))
            for (i = 0; i < gimple_num_ops (stmt); i++)
              {
                tree op = gimple_op (stmt, i);
                /* Operands may be empty here.  For example, the labels
                   of a GIMPLE_COND are nulled out following the creation
                   of the corresponding CFG edges.  */
                if (op && TREE_CODE (op) == ADDR_EXPR)
                  recompute_tree_invariant_for_addr_expr (op);
              }

          if (fold_stmt (&gsi))
            stmt = gsi_stmt (gsi);

          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            gimple_purge_dead_eh_edges (gimple_bb (stmt));

          update_stmt (stmt);
        }
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
        {
          substitute_in_loop_info (loop, name, val);
        }
    }
}

/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
                               || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
      if (current_loops
          && loops_state_satisfies_p (LOOP_CLOSED_SSA)
          && !virtual_operand_p (def)
          && TREE_CODE (use) == SSA_NAME
          && a->loop_father != b->loop_father)
        may_replace_uses = false;

      if (!may_replace_uses)
        {
          gcc_assert (!virtual_operand_p (def));

          /* Note that just emitting the copies is fine -- there is no problem
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
          copy = gimple_build_assign (def, use);
          gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
          remove_phi_node (&psi, false);
        }
      else
        {
          /* If we deal with a PHI for virtual operands, we can simply
             propagate these without fussing with folding or updating
             the stmt.  */
          if (virtual_operand_p (def))
            {
              imm_use_iterator iter;
              use_operand_p use_p;
              gimple stmt;

              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, use);

              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
            }
          else
            replace_uses_by (def, use);

          remove_phi_node (&psi, true);
        }
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          tree label = gimple_label_label (stmt);
          int lp_nr;

          gsi_remove (&gsi, false);

          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B.
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we cannot just delete the
             label.  Instead we move the label to the start of block A.  */
          if (FORCED_LABEL (label))
            {
              gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
              gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
            }
          /* Other user labels are kept around in the form of a debug stmt.  */
          else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
            {
              gimple dbg = gimple_build_debug_bind (label,
                                                    integer_zero_node,
                                                    stmt);
              gimple_debug_bind_reset_value (dbg);
              gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
            }

          lp_nr = EH_LANDING_PAD_NR (label);
          if (lp_nr)
            {
              eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
              lp->post_landing_pad = NULL;
            }
        }
      else
        {
          gimple_set_bb (stmt, a);
          gsi_next (&gsi);
        }
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}


/* Return the one of the two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}

/* CALL is a GIMPLE_CALL.  Set the cfun->calls_* flags that it implies.  */

void
notice_special_calls (gimple call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}


/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}

/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}


/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
        {
          dump_bb (dump_file, bb, 0, dump_flags);
          fprintf (dump_file, "\n");
        }
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
         with it.  */
      if (loop->latch == bb
          || loop->header == bb)
        free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
         released DEFs into debug stmts.  See
         eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
         details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
          gimple stmt = gsi_stmt (i);
          if (gimple_code (stmt) == GIMPLE_LABEL
              && (FORCED_LABEL (gimple_label_label (stmt))
                  || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
              gimple_stmt_iterator new_gsi;

              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
              if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
                  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
                  FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }

              new_bb = bb->prev_bb;
              new_gsi = gsi_start_bb (new_bb);
              gsi_remove (&i, false);
              gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
              /* Release SSA definitions if we are in SSA.  Note that we
                 may be called when not in SSA.  For example,
                 final_cleanup calls this function via
                 cleanup_tree_cfg.  */
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);

              gsi_remove (&i, true);
            }

          if (gsi_end_p (i))
            i = gsi_last_bb (bb);
          else
            gsi_prev (&i);
        }
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}


/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label; if the argument is
         not a label then we cannot construct a proper CFG.

         It may be the case that we only need to allow the LABEL_REF to
         appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
         appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
          && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
        return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}

/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}

/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}

/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (basic_block bb, tree val)
{
  basic_block dest_bb;
  edge e;
  gimple switch_stmt;
  tree taken_case;

  switch_stmt = last_stmt (bb);
  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}


/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
        high = i;
      else
        low = i;

      if (CASE_HIGH (t) == NULL)
        {
2001 	  /* A single-valued case label.  */
2002 if (cmp == 0)
2003 return t;
2004 }
2005 else
2006 {
2007 /* A case range. We can only handle integer ranges. */
2008 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2009 return t;
2010 }
2011 }
2012
2013 return default_case;
2014 }
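
/* For example (illustrative), given the sorted case vector of

     switch (i_1) { default: ... case 1: ... case 3 ... 7: ... case 10: ... }

   a query for VAL == 5 narrows [low, high) onto "case 3 ... 7", since
   CASE_LOW (3) compares <= 5 and CASE_HIGH (7) compares >= 5, while a
   query for VAL == 2 exhausts the search and falls back to the
   default label.  */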
2015
2016
2017 /* Dump a basic block on stderr. */
2018
2019 void
2020 gimple_debug_bb (basic_block bb)
2021 {
2022 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2023 }
2024
2025
2026 /* Dump basic block with index N on stderr. */
2027
2028 basic_block
2029 gimple_debug_bb_n (int n)
2030 {
2031 gimple_debug_bb (BASIC_BLOCK (n));
2032 return BASIC_BLOCK (n);
2033 }
2034
2035
2036 /* Dump the CFG on stderr.
2037
2038    FLAGS are the same as those used by the tree dumping functions
2039    (see TDF_* in dumpfile.h).  */
2040
2041 void
2042 gimple_debug_cfg (int flags)
2043 {
2044 gimple_dump_cfg (stderr, flags);
2045 }
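
/* These dumpers are typically used interactively, e.g. from a
   debugger session (illustrative):

     (gdb) call gimple_debug_cfg (TDF_DETAILS | TDF_STATS)

   which prints the block and edge counts, the brief CFG layout, the
   CFG statistics table and the function body on stderr.  */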
2046
2047
2048 /* Dump the program showing basic block boundaries on the given FILE.
2049
2050    FLAGS are the same as those used by the tree dumping functions (see
2051    TDF_* in dumpfile.h).  */
2052
2053 void
2054 gimple_dump_cfg (FILE *file, int flags)
2055 {
2056 if (flags & TDF_DETAILS)
2057 {
2058 dump_function_header (file, current_function_decl, flags);
2059 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2060 n_basic_blocks, n_edges, last_basic_block);
2061
2062 brief_dump_cfg (file, flags | TDF_COMMENT);
2063 fprintf (file, "\n");
2064 }
2065
2066 if (flags & TDF_STATS)
2067 dump_cfg_stats (file);
2068
2069 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2070 }
2071
2072
2073 /* Dump CFG statistics on FILE. */
2074
2075 void
2076 dump_cfg_stats (FILE *file)
2077 {
2078 static long max_num_merged_labels = 0;
2079 unsigned long size, total = 0;
2080 long num_edges;
2081 basic_block bb;
2082 const char * const fmt_str = "%-30s%-13s%12s\n";
2083 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2084 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2085 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2086 const char *funcname = current_function_name ();
2087
2088 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2089
2090 fprintf (file, "---------------------------------------------------------\n");
2091 fprintf (file, fmt_str, "", " Number of ", "Memory");
2092 fprintf (file, fmt_str, "", " instances ", "used ");
2093 fprintf (file, "---------------------------------------------------------\n");
2094
2095 size = n_basic_blocks * sizeof (struct basic_block_def);
2096 total += size;
2097 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
2098 SCALE (size), LABEL (size));
2099
2100 num_edges = 0;
2101 FOR_EACH_BB (bb)
2102 num_edges += EDGE_COUNT (bb->succs);
2103 size = num_edges * sizeof (struct edge_def);
2104 total += size;
2105 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2106
2107 fprintf (file, "---------------------------------------------------------\n");
2108 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2109 LABEL (total));
2110 fprintf (file, "---------------------------------------------------------\n");
2111 fprintf (file, "\n");
2112
2113 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2114 max_num_merged_labels = cfg_stats.num_merged_labels;
2115
2116 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2117 cfg_stats.num_merged_labels, max_num_merged_labels);
2118
2119 fprintf (file, "\n");
2120 }
2121
2122
2123 /* Dump CFG statistics on stderr. Keep extern so that it's always
2124 linked in the final executable. */
2125
2126 DEBUG_FUNCTION void
2127 debug_cfg_stats (void)
2128 {
2129 dump_cfg_stats (stderr);
2130 }
2131
2132 /*---------------------------------------------------------------------------
2133 Miscellaneous helpers
2134 ---------------------------------------------------------------------------*/
2135
2136 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2137 flow. Transfers of control flow associated with EH are excluded. */
2138
2139 static bool
2140 call_can_make_abnormal_goto (gimple t)
2141 {
2142   /* If the function has no non-local labels and does not call setjmp,
2143      then a call cannot make an abnormal transfer of control.  */
2144 if (!cfun->has_nonlocal_label
2145 && !cfun->calls_setjmp)
2146 return false;
2147
2148 /* Likewise if the call has no side effects. */
2149 if (!gimple_has_side_effects (t))
2150 return false;
2151
2152 /* Likewise if the called function is leaf. */
2153 if (gimple_call_flags (t) & ECF_LEAF)
2154 return false;
2155
2156 return true;
2157 }
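
/* An illustrative source-level case (GNU C nested functions) where a
   call can make an abnormal transfer of control:

     void f (void)
     {
       __label__ out;
       void g (void) { goto out; }
       g ();	      <- may transfer control abnormally to OUT
     out:;
     }

   Here cfun->has_nonlocal_label is set for f, the call to g has side
   effects and g is not ECF_LEAF, so the predicate returns true.  */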
2158
2159
2160 /* Return true if T can make an abnormal transfer of control flow.
2161 Transfers of control flow associated with EH are excluded. */
2162
2163 bool
2164 stmt_can_make_abnormal_goto (gimple t)
2165 {
2166 if (computed_goto_p (t))
2167 return true;
2168 if (is_gimple_call (t))
2169 return call_can_make_abnormal_goto (t);
2170 return false;
2171 }
2172
2173
2174 /* Return true if T represents a stmt that always transfers control. */
2175
2176 bool
2177 is_ctrl_stmt (gimple t)
2178 {
2179 switch (gimple_code (t))
2180 {
2181 case GIMPLE_COND:
2182 case GIMPLE_SWITCH:
2183 case GIMPLE_GOTO:
2184 case GIMPLE_RETURN:
2185 case GIMPLE_RESX:
2186 return true;
2187 default:
2188 return false;
2189 }
2190 }
2191
2192
2193 /* Return true if T is a statement that may alter the flow of control
2194 (e.g., a call to a non-returning function). */
2195
2196 bool
2197 is_ctrl_altering_stmt (gimple t)
2198 {
2199 gcc_assert (t);
2200
2201 switch (gimple_code (t))
2202 {
2203 case GIMPLE_CALL:
2204 {
2205 int flags = gimple_call_flags (t);
2206
2207 /* A call alters control flow if it can make an abnormal goto. */
2208 if (call_can_make_abnormal_goto (t))
2209 return true;
2210
2211 /* A call also alters control flow if it does not return. */
2212 if (flags & ECF_NORETURN)
2213 return true;
2214
2215 /* TM ending statements have backedges out of the transaction.
2216 Return true so we split the basic block containing them.
2217 Note that the TM_BUILTIN test is merely an optimization. */
2218 if ((flags & ECF_TM_BUILTIN)
2219 && is_tm_ending_fndecl (gimple_call_fndecl (t)))
2220 return true;
2221
2222 	  /* A BUILT_IN_RETURN call is the same as a return statement.  */
2223 if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
2224 return true;
2225 }
2226 break;
2227
2228 case GIMPLE_EH_DISPATCH:
2229 /* EH_DISPATCH branches to the individual catch handlers at
2230 this level of a try or allowed-exceptions region. It can
2231 fallthru to the next statement as well. */
2232 return true;
2233
2234 case GIMPLE_ASM:
2235 if (gimple_asm_nlabels (t) > 0)
2236 return true;
2237 break;
2238
2239 CASE_GIMPLE_OMP:
2240 /* OpenMP directives alter control flow. */
2241 return true;
2242
2243 case GIMPLE_TRANSACTION:
2244 /* A transaction start alters control flow. */
2245 return true;
2246
2247 default:
2248 break;
2249 }
2250
2251 /* If a statement can throw, it alters control flow. */
2252 return stmt_can_throw_internal (t);
2253 }
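
/* For instance (illustrative): a call to abort () alters control flow
   because it is ECF_NORETURN, and an asm goto (a GIMPLE_ASM with
   labels) does as well, while a call to a const leaf function that
   cannot throw alters nothing and may share a block with other
   statements.  */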
2254
2255
2256 /* Return true if T is a simple local goto. */
2257
2258 bool
2259 simple_goto_p (gimple t)
2260 {
2261 return (gimple_code (t) == GIMPLE_GOTO
2262 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2263 }
2264
2265
2266 /* Return true if STMT should start a new basic block. PREV_STMT is
2267 the statement preceding STMT. It is used when STMT is a label or a
2268 case label. Labels should only start a new basic block if their
2269    previous statement wasn't a label.  Otherwise, sequences of labels
2270    would generate unnecessary basic blocks that contain only a single
2271    label.  */
2272
2273 static inline bool
2274 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2275 {
2276 if (stmt == NULL)
2277 return false;
2278
2279 /* Labels start a new basic block only if the preceding statement
2280 wasn't a label of the same type. This prevents the creation of
2281 consecutive blocks that have nothing but a single label. */
2282 if (gimple_code (stmt) == GIMPLE_LABEL)
2283 {
2284 /* Nonlocal and computed GOTO targets always start a new block. */
2285 if (DECL_NONLOCAL (gimple_label_label (stmt))
2286 || FORCED_LABEL (gimple_label_label (stmt)))
2287 return true;
2288
2289 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2290 {
2291 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2292 return true;
2293
2294 cfg_stats.num_merged_labels++;
2295 return false;
2296 }
2297 else
2298 return true;
2299 }
2300 else if (gimple_code (stmt) == GIMPLE_CALL
2301 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2302     /* setjmp acts similarly to a nonlocal GOTO target and thus should
2303 start a new block. */
2304 return true;
2305
2306 return false;
2307 }
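
/* Illustrative GIMPLE fragment:

     L0:
     L1:	      <- merged with L0, does not start a block
       x_1 = 1;
     L2:	      <- starts a block (preceded by a non-label)

   whereas a FORCED_LABEL or DECL_NONLOCAL label, or a call to a
   returns-twice function such as setjmp, always starts a new one.  */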
2308
2309
2310 /* Return true if T should end a basic block. */
2311
2312 bool
2313 stmt_ends_bb_p (gimple t)
2314 {
2315 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2316 }
2317
2318 /* Remove block annotations and other data structures. */
2319
2320 void
2321 delete_tree_cfg_annotations (void)
2322 {
2323 vec_free (label_to_block_map);
2324 }
2325
2326
2327 /* Return the first statement in basic block BB. */
2328
2329 gimple
2330 first_stmt (basic_block bb)
2331 {
2332 gimple_stmt_iterator i = gsi_start_bb (bb);
2333 gimple stmt = NULL;
2334
2335 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2336 {
2337 gsi_next (&i);
2338 stmt = NULL;
2339 }
2340 return stmt;
2341 }
2342
2343 /* Return the first non-label statement in basic block BB. */
2344
2345 static gimple
2346 first_non_label_stmt (basic_block bb)
2347 {
2348 gimple_stmt_iterator i = gsi_start_bb (bb);
2349 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2350 gsi_next (&i);
2351 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2352 }
2353
2354 /* Return the last statement in basic block BB. */
2355
2356 gimple
2357 last_stmt (basic_block bb)
2358 {
2359 gimple_stmt_iterator i = gsi_last_bb (bb);
2360 gimple stmt = NULL;
2361
2362 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2363 {
2364 gsi_prev (&i);
2365 stmt = NULL;
2366 }
2367 return stmt;
2368 }
2369
2370 /* Return the last statement of an otherwise empty block. Return NULL
2371 if the block is totally empty, or if it contains more than one
2372 statement. */
2373
2374 gimple
2375 last_and_only_stmt (basic_block bb)
2376 {
2377 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2378 gimple last, prev;
2379
2380 if (gsi_end_p (i))
2381 return NULL;
2382
2383 last = gsi_stmt (i);
2384 gsi_prev_nondebug (&i);
2385 if (gsi_end_p (i))
2386 return last;
2387
2388 /* Empty statements should no longer appear in the instruction stream.
2389 Everything that might have appeared before should be deleted by
2390 remove_useless_stmts, and the optimizers should just gsi_remove
2391 instead of smashing with build_empty_stmt.
2392
2393 Thus the only thing that should appear here in a block containing
2394 one executable statement is a label. */
2395 prev = gsi_stmt (i);
2396 if (gimple_code (prev) == GIMPLE_LABEL)
2397 return last;
2398 else
2399 return NULL;
2400 }
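
/* For example (illustrative): for a block containing only

     L5:
       return x_1;

   last_and_only_stmt returns the GIMPLE_RETURN, since the leading
   label (like debug stmts) is disregarded, whereas for a block with
   two executable statements it returns NULL.  */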
2401
2402 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2403
2404 static void
2405 reinstall_phi_args (edge new_edge, edge old_edge)
2406 {
2407 edge_var_map_vector *v;
2408 edge_var_map *vm;
2409 int i;
2410 gimple_stmt_iterator phis;
2411
2412 v = redirect_edge_var_map_vector (old_edge);
2413 if (!v)
2414 return;
2415
2416 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2417 v->iterate (i, &vm) && !gsi_end_p (phis);
2418 i++, gsi_next (&phis))
2419 {
2420 gimple phi = gsi_stmt (phis);
2421 tree result = redirect_edge_var_map_result (vm);
2422 tree arg = redirect_edge_var_map_def (vm);
2423
2424 gcc_assert (result == gimple_phi_result (phi));
2425
2426 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2427 }
2428
2429 redirect_edge_var_map_clear (old_edge);
2430 }
2431
2432 /* Returns the basic block after which the new basic block created
2433 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2434 near its "logical" location. This is of most help to humans looking
2435 at debugging dumps. */
2436
2437 static basic_block
2438 split_edge_bb_loc (edge edge_in)
2439 {
2440 basic_block dest = edge_in->dest;
2441 basic_block dest_prev = dest->prev_bb;
2442
2443 if (dest_prev)
2444 {
2445 edge e = find_edge (dest_prev, dest);
2446 if (e && !(e->flags & EDGE_COMPLEX))
2447 return edge_in->src;
2448 }
2449 return dest_prev;
2450 }
2451
2452 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2453 Abort on abnormal edges. */
2454
2455 static basic_block
2456 gimple_split_edge (edge edge_in)
2457 {
2458 basic_block new_bb, after_bb, dest;
2459 edge new_edge, e;
2460
2461 /* Abnormal edges cannot be split. */
2462 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2463
2464 dest = edge_in->dest;
2465
2466 after_bb = split_edge_bb_loc (edge_in);
2467
2468 new_bb = create_empty_bb (after_bb);
2469 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2470 new_bb->count = edge_in->count;
2471 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2472 new_edge->probability = REG_BR_PROB_BASE;
2473 new_edge->count = edge_in->count;
2474
2475 e = redirect_edge_and_branch (edge_in, new_bb);
2476 gcc_assert (e == edge_in);
2477 reinstall_phi_args (new_edge, e);
2478
2479 return new_bb;
2480 }
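
/* Illustration: splitting the critical edge A->C (A has another
   successor B, C has another predecessor D):

     A -> B		A -> B
     A -> C	  =>	A -> NEW -> C
     D -> C		D -> C

   NEW inherits the count and frequency of the split edge and falls
   through to C (EDGE_FALLTHRU), and the PHI arguments previously
   associated with A->C are reinstalled on the NEW->C edge.  */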
2481
2482
2483 /* Verify properties of the address expression T with base object BASE. */
2484
2485 static tree
2486 verify_address (tree t, tree base)
2487 {
2488 bool old_constant;
2489 bool old_side_effects;
2490 bool new_constant;
2491 bool new_side_effects;
2492
2493 old_constant = TREE_CONSTANT (t);
2494 old_side_effects = TREE_SIDE_EFFECTS (t);
2495
2496 recompute_tree_invariant_for_addr_expr (t);
2497 new_side_effects = TREE_SIDE_EFFECTS (t);
2498 new_constant = TREE_CONSTANT (t);
2499
2500 if (old_constant != new_constant)
2501 {
2502 error ("constant not recomputed when ADDR_EXPR changed");
2503 return t;
2504 }
2505 if (old_side_effects != new_side_effects)
2506 {
2507 error ("side effects not recomputed when ADDR_EXPR changed");
2508 return t;
2509 }
2510
2511 if (!(TREE_CODE (base) == VAR_DECL
2512 || TREE_CODE (base) == PARM_DECL
2513 || TREE_CODE (base) == RESULT_DECL))
2514 return NULL_TREE;
2515
2516 if (DECL_GIMPLE_REG_P (base))
2517 {
2518 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2519 return base;
2520 }
2521
2522 return NULL_TREE;
2523 }
2524
2525 /* Callback for walk_tree; check that all elements with their address
2526    taken are properly noticed as such.  DATA is unused by this
2527    callback.  */
2528
2529 static tree
2530 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2531 {
2532 tree t = *tp, x;
2533
2534 if (TYPE_P (t))
2535 *walk_subtrees = 0;
2536
2537 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2538 #define CHECK_OP(N, MSG) \
2539 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2540 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2541
2542 switch (TREE_CODE (t))
2543 {
2544 case SSA_NAME:
2545 if (SSA_NAME_IN_FREE_LIST (t))
2546 {
2547 error ("SSA name in freelist but still referenced");
2548 return *tp;
2549 }
2550 break;
2551
2552 case INDIRECT_REF:
2553 error ("INDIRECT_REF in gimple IL");
2554 return t;
2555
2556 case MEM_REF:
2557 x = TREE_OPERAND (t, 0);
2558 if (!POINTER_TYPE_P (TREE_TYPE (x))
2559 || !is_gimple_mem_ref_addr (x))
2560 {
2561 error ("invalid first operand of MEM_REF");
2562 return x;
2563 }
2564 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2565 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2566 {
2567 error ("invalid offset operand of MEM_REF");
2568 return TREE_OPERAND (t, 1);
2569 }
2570 if (TREE_CODE (x) == ADDR_EXPR
2571 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2572 return x;
2573 *walk_subtrees = 0;
2574 break;
2575
2576 case ASSERT_EXPR:
2577 x = fold (ASSERT_EXPR_COND (t));
2578 if (x == boolean_false_node)
2579 {
2580 error ("ASSERT_EXPR with an always-false condition");
2581 return *tp;
2582 }
2583 break;
2584
2585 case MODIFY_EXPR:
2586 error ("MODIFY_EXPR not expected while having tuples");
2587 return *tp;
2588
2589 case ADDR_EXPR:
2590 {
2591 tree tem;
2592
2593 gcc_assert (is_gimple_address (t));
2594
2595 /* Skip any references (they will be checked when we recurse down the
2596 tree) and ensure that any variable used as a prefix is marked
2597 addressable. */
2598 for (x = TREE_OPERAND (t, 0);
2599 handled_component_p (x);
2600 x = TREE_OPERAND (x, 0))
2601 ;
2602
2603 if ((tem = verify_address (t, x)))
2604 return tem;
2605
2606 if (!(TREE_CODE (x) == VAR_DECL
2607 || TREE_CODE (x) == PARM_DECL
2608 || TREE_CODE (x) == RESULT_DECL))
2609 return NULL;
2610
2611 if (!TREE_ADDRESSABLE (x))
2612 {
2613 error ("address taken, but ADDRESSABLE bit not set");
2614 return x;
2615 }
2616
2617 break;
2618 }
2619
2620 case COND_EXPR:
2621 x = COND_EXPR_COND (t);
2622 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2623 {
2624 error ("non-integral used in condition");
2625 return x;
2626 }
2627 if (!is_gimple_condexpr (x))
2628 {
2629 error ("invalid conditional operand");
2630 return x;
2631 }
2632 break;
2633
2634 case NON_LVALUE_EXPR:
2635 case TRUTH_NOT_EXPR:
2636 gcc_unreachable ();
2637
2638 CASE_CONVERT:
2639 case FIX_TRUNC_EXPR:
2640 case FLOAT_EXPR:
2641 case NEGATE_EXPR:
2642 case ABS_EXPR:
2643 case BIT_NOT_EXPR:
2644 CHECK_OP (0, "invalid operand to unary operator");
2645 break;
2646
2647 case REALPART_EXPR:
2648 case IMAGPART_EXPR:
2649 case BIT_FIELD_REF:
2650 if (!is_gimple_reg_type (TREE_TYPE (t)))
2651 {
2652 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2653 return t;
2654 }
2655
2656 if (TREE_CODE (t) == BIT_FIELD_REF)
2657 {
2658 if (!host_integerp (TREE_OPERAND (t, 1), 1)
2659 || !host_integerp (TREE_OPERAND (t, 2), 1))
2660 {
2661 error ("invalid position or size operand to BIT_FIELD_REF");
2662 return t;
2663 }
2664 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2665 && (TYPE_PRECISION (TREE_TYPE (t))
2666 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2667 {
2668 error ("integral result type precision does not match "
2669 "field size of BIT_FIELD_REF");
2670 return t;
2671 }
2672 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2673 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2674 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2675 != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
2676 {
2677 error ("mode precision of non-integral result does not "
2678 "match field size of BIT_FIELD_REF");
2679 return t;
2680 }
2681 }
2682 t = TREE_OPERAND (t, 0);
2683
2684 /* Fall-through. */
2685 case COMPONENT_REF:
2686 case ARRAY_REF:
2687 case ARRAY_RANGE_REF:
2688 case VIEW_CONVERT_EXPR:
2689 /* We have a nest of references. Verify that each of the operands
2690 that determine where to reference is either a constant or a variable,
2691 verify that the base is valid, and then show we've already checked
2692 the subtrees. */
2693 while (handled_component_p (t))
2694 {
2695 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2696 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2697 else if (TREE_CODE (t) == ARRAY_REF
2698 || TREE_CODE (t) == ARRAY_RANGE_REF)
2699 {
2700 CHECK_OP (1, "invalid array index");
2701 if (TREE_OPERAND (t, 2))
2702 CHECK_OP (2, "invalid array lower bound");
2703 if (TREE_OPERAND (t, 3))
2704 CHECK_OP (3, "invalid array stride");
2705 }
2706 else if (TREE_CODE (t) == BIT_FIELD_REF
2707 || TREE_CODE (t) == REALPART_EXPR
2708 || TREE_CODE (t) == IMAGPART_EXPR)
2709 {
2710 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2711 "REALPART_EXPR");
2712 return t;
2713 }
2714
2715 t = TREE_OPERAND (t, 0);
2716 }
2717
2718 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2719 {
2720 error ("invalid reference prefix");
2721 return t;
2722 }
2723 *walk_subtrees = 0;
2724 break;
2725 case PLUS_EXPR:
2726 case MINUS_EXPR:
2727       /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
2728 	 arithmetic should be done using POINTER_PLUS_EXPR.  */
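      /* For example (illustrative): the C-level "p + 4" for an "int *p"
	 must appear in GIMPLE as POINTER_PLUS_EXPR <p_1, 16>, with the
	 offset expressed in ptrofftype, never as a PLUS_EXPR with a
	 pointer operand.  */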
2729 if (POINTER_TYPE_P (TREE_TYPE (t)))
2730 {
2731 error ("invalid operand to plus/minus, type is a pointer");
2732 return t;
2733 }
2734 CHECK_OP (0, "invalid operand to binary operator");
2735 CHECK_OP (1, "invalid operand to binary operator");
2736 break;
2737
2738 case POINTER_PLUS_EXPR:
2739 /* Check to make sure the first operand is a pointer or reference type. */
2740 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2741 {
2742 error ("invalid operand to pointer plus, first operand is not a pointer");
2743 return t;
2744 }
2745 /* Check to make sure the second operand is a ptrofftype. */
2746 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2747 {
2748 error ("invalid operand to pointer plus, second operand is not an "
2749 "integer type of appropriate width");
2750 return t;
2751 }
2752 /* FALLTHROUGH */
2753 case LT_EXPR:
2754 case LE_EXPR:
2755 case GT_EXPR:
2756 case GE_EXPR:
2757 case EQ_EXPR:
2758 case NE_EXPR:
2759 case UNORDERED_EXPR:
2760 case ORDERED_EXPR:
2761 case UNLT_EXPR:
2762 case UNLE_EXPR:
2763 case UNGT_EXPR:
2764 case UNGE_EXPR:
2765 case UNEQ_EXPR:
2766 case LTGT_EXPR:
2767 case MULT_EXPR:
2768 case TRUNC_DIV_EXPR:
2769 case CEIL_DIV_EXPR:
2770 case FLOOR_DIV_EXPR:
2771 case ROUND_DIV_EXPR:
2772 case TRUNC_MOD_EXPR:
2773 case CEIL_MOD_EXPR:
2774 case FLOOR_MOD_EXPR:
2775 case ROUND_MOD_EXPR:
2776 case RDIV_EXPR:
2777 case EXACT_DIV_EXPR:
2778 case MIN_EXPR:
2779 case MAX_EXPR:
2780 case LSHIFT_EXPR:
2781 case RSHIFT_EXPR:
2782 case LROTATE_EXPR:
2783 case RROTATE_EXPR:
2784 case BIT_IOR_EXPR:
2785 case BIT_XOR_EXPR:
2786 case BIT_AND_EXPR:
2787 CHECK_OP (0, "invalid operand to binary operator");
2788 CHECK_OP (1, "invalid operand to binary operator");
2789 break;
2790
2791 case CONSTRUCTOR:
2792 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2793 *walk_subtrees = 0;
2794 break;
2795
2796 case CASE_LABEL_EXPR:
2797 if (CASE_CHAIN (t))
2798 {
2799 error ("invalid CASE_CHAIN");
2800 return t;
2801 }
2802 break;
2803
2804 default:
2805 break;
2806 }
2807 return NULL;
2808
2809 #undef CHECK_OP
2810 }
2811
2812
2813 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2814 Returns true if there is an error, otherwise false. */
2815
2816 static bool
2817 verify_types_in_gimple_min_lval (tree expr)
2818 {
2819 tree op;
2820
2821 if (is_gimple_id (expr))
2822 return false;
2823
2824 if (TREE_CODE (expr) != TARGET_MEM_REF
2825 && TREE_CODE (expr) != MEM_REF)
2826 {
2827 error ("invalid expression for min lvalue");
2828 return true;
2829 }
2830
2831 /* TARGET_MEM_REFs are strange beasts. */
2832 if (TREE_CODE (expr) == TARGET_MEM_REF)
2833 return false;
2834
2835 op = TREE_OPERAND (expr, 0);
2836 if (!is_gimple_val (op))
2837 {
2838 error ("invalid operand in indirect reference");
2839 debug_generic_stmt (op);
2840 return true;
2841 }
2842 /* Memory references now generally can involve a value conversion. */
2843
2844 return false;
2845 }
2846
2847 /* Verify if EXPR is a valid GIMPLE reference expression. If
2848 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2849 if there is an error, otherwise false. */
2850
2851 static bool
2852 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2853 {
2854 while (handled_component_p (expr))
2855 {
2856 tree op = TREE_OPERAND (expr, 0);
2857
2858 if (TREE_CODE (expr) == ARRAY_REF
2859 || TREE_CODE (expr) == ARRAY_RANGE_REF)
2860 {
2861 if (!is_gimple_val (TREE_OPERAND (expr, 1))
2862 || (TREE_OPERAND (expr, 2)
2863 && !is_gimple_val (TREE_OPERAND (expr, 2)))
2864 || (TREE_OPERAND (expr, 3)
2865 && !is_gimple_val (TREE_OPERAND (expr, 3))))
2866 {
2867 error ("invalid operands to array reference");
2868 debug_generic_stmt (expr);
2869 return true;
2870 }
2871 }
2872
2873 /* Verify if the reference array element types are compatible. */
2874 if (TREE_CODE (expr) == ARRAY_REF
2875 && !useless_type_conversion_p (TREE_TYPE (expr),
2876 TREE_TYPE (TREE_TYPE (op))))
2877 {
2878 error ("type mismatch in array reference");
2879 debug_generic_stmt (TREE_TYPE (expr));
2880 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2881 return true;
2882 }
2883 if (TREE_CODE (expr) == ARRAY_RANGE_REF
2884 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
2885 TREE_TYPE (TREE_TYPE (op))))
2886 {
2887 error ("type mismatch in array range reference");
2888 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
2889 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2890 return true;
2891 }
2892
2893 if ((TREE_CODE (expr) == REALPART_EXPR
2894 || TREE_CODE (expr) == IMAGPART_EXPR)
2895 && !useless_type_conversion_p (TREE_TYPE (expr),
2896 TREE_TYPE (TREE_TYPE (op))))
2897 {
2898 error ("type mismatch in real/imagpart reference");
2899 debug_generic_stmt (TREE_TYPE (expr));
2900 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
2901 return true;
2902 }
2903
2904 if (TREE_CODE (expr) == COMPONENT_REF
2905 && !useless_type_conversion_p (TREE_TYPE (expr),
2906 TREE_TYPE (TREE_OPERAND (expr, 1))))
2907 {
2908 error ("type mismatch in component reference");
2909 debug_generic_stmt (TREE_TYPE (expr));
2910 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
2911 return true;
2912 }
2913
2914 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2915 {
2916 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
2917 that their operand is not an SSA name or an invariant when
2918 requiring an lvalue (this usually means there is a SRA or IPA-SRA
2919 	     bug).  Otherwise there is nothing to verify; gross mismatches at
2920 	     most invoke undefined behavior.  */
2921 if (require_lvalue
2922 && (TREE_CODE (op) == SSA_NAME
2923 || is_gimple_min_invariant (op)))
2924 {
2925 error ("conversion of an SSA_NAME on the left hand side");
2926 debug_generic_stmt (expr);
2927 return true;
2928 }
2929 else if (TREE_CODE (op) == SSA_NAME
2930 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
2931 {
2932 error ("conversion of register to a different size");
2933 debug_generic_stmt (expr);
2934 return true;
2935 }
2936 else if (!handled_component_p (op))
2937 return false;
2938 }
2939
2940 expr = op;
2941 }
2942
2943 if (TREE_CODE (expr) == MEM_REF)
2944 {
2945 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
2946 {
2947 error ("invalid address operand in MEM_REF");
2948 debug_generic_stmt (expr);
2949 return true;
2950 }
2951 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
2952 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
2953 {
2954 error ("invalid offset operand in MEM_REF");
2955 debug_generic_stmt (expr);
2956 return true;
2957 }
2958 }
2959 else if (TREE_CODE (expr) == TARGET_MEM_REF)
2960 {
2961 if (!TMR_BASE (expr)
2962 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
2963 {
2964 error ("invalid address operand in TARGET_MEM_REF");
2965 return true;
2966 }
2967 if (!TMR_OFFSET (expr)
2968 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
2969 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
2970 {
2971 error ("invalid offset operand in TARGET_MEM_REF");
2972 debug_generic_stmt (expr);
2973 return true;
2974 }
2975 }
2976
2977 return ((require_lvalue || !is_gimple_min_invariant (expr))
2978 && verify_types_in_gimple_min_lval (expr));
2979 }
2980
2981 /* Return true if there is a pointer type in the TYPE_POINTER_TO (SRC_OBJ)
2982    list of pointer-to types that is trivially convertible to DEST.  */
2983
2984 static bool
2985 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2986 {
2987 tree src;
2988
2989 if (!TYPE_POINTER_TO (src_obj))
2990 return true;
2991
2992 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
2993 if (useless_type_conversion_p (dest, src))
2994 return true;
2995
2996 return false;
2997 }
2998
2999 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3000 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3001
3002 static bool
3003 valid_fixed_convert_types_p (tree type1, tree type2)
3004 {
3005 return (FIXED_POINT_TYPE_P (type1)
3006 && (INTEGRAL_TYPE_P (type2)
3007 || SCALAR_FLOAT_TYPE_P (type2)
3008 || FIXED_POINT_TYPE_P (type2)));
3009 }
3010
3011 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3012 is a problem, otherwise false. */
3013
3014 static bool
3015 verify_gimple_call (gimple stmt)
3016 {
3017 tree fn = gimple_call_fn (stmt);
3018 tree fntype, fndecl;
3019 unsigned i;
3020
3021 if (gimple_call_internal_p (stmt))
3022 {
3023 if (fn)
3024 {
3025 error ("gimple call has two targets");
3026 debug_generic_stmt (fn);
3027 return true;
3028 }
3029 }
3030 else
3031 {
3032 if (!fn)
3033 {
3034 error ("gimple call has no target");
3035 return true;
3036 }
3037 }
3038
3039 if (fn && !is_gimple_call_addr (fn))
3040 {
3041 error ("invalid function in gimple call");
3042 debug_generic_stmt (fn);
3043 return true;
3044 }
3045
3046 if (fn
3047 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3048 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3049 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3050 {
3051 error ("non-function in gimple call");
3052 return true;
3053 }
3054
3055 fndecl = gimple_call_fndecl (stmt);
3056 if (fndecl
3057 && TREE_CODE (fndecl) == FUNCTION_DECL
3058 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3059 && !DECL_PURE_P (fndecl)
3060 && !TREE_READONLY (fndecl))
3061 {
3062 error ("invalid pure const state for function");
3063 return true;
3064 }
3065
3066 if (gimple_call_lhs (stmt)
3067 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3068 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3069 {
3070 error ("invalid LHS in gimple call");
3071 return true;
3072 }
3073
3074 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3075 {
3076 error ("LHS in noreturn call");
3077 return true;
3078 }
3079
3080 fntype = gimple_call_fntype (stmt);
3081 if (fntype
3082 && gimple_call_lhs (stmt)
3083 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3084 TREE_TYPE (fntype))
3085 /* ??? At least C++ misses conversions at assignments from
3086 void * call results.
3087 ??? Java is completely off. Especially with functions
3088 returning java.lang.Object.
3089 For now simply allow arbitrary pointer type conversions. */
3090 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3091 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3092 {
3093 error ("invalid conversion in gimple call");
3094 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3095 debug_generic_stmt (TREE_TYPE (fntype));
3096 return true;
3097 }
3098
3099 if (gimple_call_chain (stmt)
3100 && !is_gimple_val (gimple_call_chain (stmt)))
3101 {
3102 error ("invalid static chain in gimple call");
3103 debug_generic_stmt (gimple_call_chain (stmt));
3104 return true;
3105 }
3106
3107 /* If there is a static chain argument, this should not be an indirect
3108 call, and the decl should have DECL_STATIC_CHAIN set. */
3109 if (gimple_call_chain (stmt))
3110 {
3111 if (!gimple_call_fndecl (stmt))
3112 {
3113 error ("static chain in indirect gimple call");
3114 return true;
3115 }
3116 fn = TREE_OPERAND (fn, 0);
3117
3118 if (!DECL_STATIC_CHAIN (fn))
3119 {
3120 error ("static chain with function that doesn%'t use one");
3121 return true;
3122 }
3123 }
3124
3125 /* ??? The C frontend passes unpromoted arguments in case it
3126 didn't see a function declaration before the call. So for now
3127 leave the call arguments mostly unverified. Once we gimplify
3128 unit-at-a-time we have a chance to fix this. */
3129
3130 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3131 {
3132 tree arg = gimple_call_arg (stmt, i);
3133 if ((is_gimple_reg_type (TREE_TYPE (arg))
3134 && !is_gimple_val (arg))
3135 || (!is_gimple_reg_type (TREE_TYPE (arg))
3136 && !is_gimple_lvalue (arg)))
3137 {
3138 error ("invalid argument to gimple call");
3139 debug_generic_expr (arg);
3140 return true;
3141 }
3142 }
3143
3144 return false;
3145 }
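
/* For example (illustrative): a call through a function pointer that
   carries a static chain, "r_1 = fn_2 (x_3) [static-chain: c_4]", is
   rejected above ("static chain in indirect gimple call"), since only
   direct calls to a fndecl with DECL_STATIC_CHAIN set may carry
   one.  */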
3146
3147 /* Verifies the gimple comparison with the result type TYPE and
3148 the operands OP0 and OP1. */
3149
3150 static bool
3151 verify_gimple_comparison (tree type, tree op0, tree op1)
3152 {
3153 tree op0_type = TREE_TYPE (op0);
3154 tree op1_type = TREE_TYPE (op1);
3155
3156 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3157 {
3158 error ("invalid operands in gimple comparison");
3159 return true;
3160 }
3161
3162   /* For comparisons we do not record the effective type the
3163      comparison is carried out in.  Instead we require that either
3164      the first operand is trivially convertible into the second,
3165      or the other way around.  Because we special-case pointers
3166      to void, we also allow comparisons of pointers with the same
3167      mode.  */
3168 if (!useless_type_conversion_p (op0_type, op1_type)
3169 && !useless_type_conversion_p (op1_type, op0_type)
3170 && (!POINTER_TYPE_P (op0_type)
3171 || !POINTER_TYPE_P (op1_type)
3172 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3173 {
3174 error ("mismatching comparison operand types");
3175 debug_generic_expr (op0_type);
3176 debug_generic_expr (op1_type);
3177 return true;
3178 }
3179
3180 /* The resulting type of a comparison may be an effective boolean type. */
3181 if (INTEGRAL_TYPE_P (type)
3182 && (TREE_CODE (type) == BOOLEAN_TYPE
3183 || TYPE_PRECISION (type) == 1))
3184 {
3185 if (TREE_CODE (op0_type) == VECTOR_TYPE
3186 || TREE_CODE (op1_type) == VECTOR_TYPE)
3187 {
3188 error ("vector comparison returning a boolean");
3189 debug_generic_expr (op0_type);
3190 debug_generic_expr (op1_type);
3191 return true;
3192 }
3193 }
3194 /* Or an integer vector type with the same size and element count
3195 as the comparison operand types. */
3196 else if (TREE_CODE (type) == VECTOR_TYPE
3197 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3198 {
3199 if (TREE_CODE (op0_type) != VECTOR_TYPE
3200 || TREE_CODE (op1_type) != VECTOR_TYPE)
3201 {
3202 error ("non-vector operands in vector comparison");
3203 debug_generic_expr (op0_type);
3204 debug_generic_expr (op1_type);
3205 return true;
3206 }
3207
3208 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3209 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3210 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3211 /* The result of a vector comparison is of signed
3212 integral type. */
3213 || TYPE_UNSIGNED (TREE_TYPE (type)))
3214 {
3215 error ("invalid vector comparison resulting type");
3216 debug_generic_expr (type);
3217 return true;
3218 }
3219 }
3220 else
3221 {
3222 error ("bogus comparison result type");
3223 debug_generic_expr (type);
3224 return true;
3225 }
3226
3227 return false;
3228 }
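
/* Examples (illustrative): "_Bool t = p_1 < q_2" verifies for
   pointers P and Q of equal mode even when their pointed-to types
   differ, whereas a comparison of two V4SI vectors must produce a
   signed integer vector of the same size and element count; giving it
   a _Bool result type is rejected as "vector comparison returning a
   boolean".  */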
3229
3230 /* Verify a gimple assignment statement STMT with an unary rhs.
3231 Returns true if anything is wrong. */
3232
3233 static bool
3234 verify_gimple_assign_unary (gimple stmt)
3235 {
3236 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3237 tree lhs = gimple_assign_lhs (stmt);
3238 tree lhs_type = TREE_TYPE (lhs);
3239 tree rhs1 = gimple_assign_rhs1 (stmt);
3240 tree rhs1_type = TREE_TYPE (rhs1);
3241
3242 if (!is_gimple_reg (lhs))
3243 {
3244 error ("non-register as LHS of unary operation");
3245 return true;
3246 }
3247
3248 if (!is_gimple_val (rhs1))
3249 {
3250 error ("invalid operand in unary operation");
3251 return true;
3252 }
3253
3254 /* First handle conversions. */
3255 switch (rhs_code)
3256 {
3257 CASE_CONVERT:
3258 {
3259 /* Allow conversions from pointer type to integral type only if
3260 there is no sign or zero extension involved.
3261 For targets were the precision of ptrofftype doesn't match that
3262 of pointers we need to allow arbitrary conversions to ptrofftype. */
3263 if ((POINTER_TYPE_P (lhs_type)
3264 && INTEGRAL_TYPE_P (rhs1_type))
3265 || (POINTER_TYPE_P (rhs1_type)
3266 && INTEGRAL_TYPE_P (lhs_type)
3267 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3268 || ptrofftype_p (sizetype))))
3269 return false;
3270
3271 /* Allow conversion from integral to offset type and vice versa. */
3272 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3273 && INTEGRAL_TYPE_P (rhs1_type))
3274 || (INTEGRAL_TYPE_P (lhs_type)
3275 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3276 return false;
3277
3278 /* Otherwise assert we are converting between types of the
3279 same kind. */
3280 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3281 {
3282 error ("invalid types in nop conversion");
3283 debug_generic_expr (lhs_type);
3284 debug_generic_expr (rhs1_type);
3285 return true;
3286 }
3287
3288 return false;
3289 }
3290
3291 case ADDR_SPACE_CONVERT_EXPR:
3292 {
3293 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3294 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3295 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3296 {
3297 error ("invalid types in address space conversion");
3298 debug_generic_expr (lhs_type);
3299 debug_generic_expr (rhs1_type);
3300 return true;
3301 }
3302
3303 return false;
3304 }
3305
3306 case FIXED_CONVERT_EXPR:
3307 {
3308 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3309 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3310 {
3311 error ("invalid types in fixed-point conversion");
3312 debug_generic_expr (lhs_type);
3313 debug_generic_expr (rhs1_type);
3314 return true;
3315 }
3316
3317 return false;
3318 }
3319
3320 case FLOAT_EXPR:
3321 {
3322 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3323 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3324 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3325 {
3326 error ("invalid types in conversion to floating point");
3327 debug_generic_expr (lhs_type);
3328 debug_generic_expr (rhs1_type);
3329 return true;
3330 }
3331
3332 return false;
3333 }
3334
3335 case FIX_TRUNC_EXPR:
3336 {
3337 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3338 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3339 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3340 {
3341 error ("invalid types in conversion to integer");
3342 debug_generic_expr (lhs_type);
3343 debug_generic_expr (rhs1_type);
3344 return true;
3345 }
3346
3347 return false;
3348 }
3349
3350 case VEC_UNPACK_HI_EXPR:
3351 case VEC_UNPACK_LO_EXPR:
3352 case REDUC_MAX_EXPR:
3353 case REDUC_MIN_EXPR:
3354 case REDUC_PLUS_EXPR:
3355 case VEC_UNPACK_FLOAT_HI_EXPR:
3356 case VEC_UNPACK_FLOAT_LO_EXPR:
3357 /* FIXME. */
3358 return false;
3359
3360 case NEGATE_EXPR:
3361 case ABS_EXPR:
3362 case BIT_NOT_EXPR:
3363 case PAREN_EXPR:
3364 case NON_LVALUE_EXPR:
3365 case CONJ_EXPR:
3366 break;
3367
3368 default:
3369 gcc_unreachable ();
3370 }
3371
3372 /* For the remaining codes assert there is no conversion involved. */
3373 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3374 {
3375 error ("non-trivial conversion in unary operation");
3376 debug_generic_expr (lhs_type);
3377 debug_generic_expr (rhs1_type);
3378 return true;
3379 }
3380
3381 return false;
3382 }
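
/* Illustrative conversions with respect to the checks above
   (GIMPLE-like pseudo statements):

     i_1 = (long) p_2;	  <- pointer to integral, OK when no sign or
			     zero extension is involved
     f_1 = (float) i_2;	  <- integral to float must use FLOAT_EXPR; as
			     a NOP conversion it is rejected as
			     "invalid types in nop conversion"  */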
3383
3384 /* Verify a gimple assignment statement STMT with a binary rhs.
3385 Returns true if anything is wrong. */
3386
3387 static bool
3388 verify_gimple_assign_binary (gimple stmt)
3389 {
3390 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3391 tree lhs = gimple_assign_lhs (stmt);
3392 tree lhs_type = TREE_TYPE (lhs);
3393 tree rhs1 = gimple_assign_rhs1 (stmt);
3394 tree rhs1_type = TREE_TYPE (rhs1);
3395 tree rhs2 = gimple_assign_rhs2 (stmt);
3396 tree rhs2_type = TREE_TYPE (rhs2);
3397
3398 if (!is_gimple_reg (lhs))
3399 {
3400 error ("non-register as LHS of binary operation");
3401 return true;
3402 }
3403
3404 if (!is_gimple_val (rhs1)
3405 || !is_gimple_val (rhs2))
3406 {
3407 error ("invalid operands in binary operation");
3408 return true;
3409 }
3410
3411 /* First handle operations that involve different types. */
3412 switch (rhs_code)
3413 {
3414 case COMPLEX_EXPR:
3415 {
3416 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3417 || !(INTEGRAL_TYPE_P (rhs1_type)
3418 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3419 || !(INTEGRAL_TYPE_P (rhs2_type)
3420 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3421 {
3422 error ("type mismatch in complex expression");
3423 debug_generic_expr (lhs_type);
3424 debug_generic_expr (rhs1_type);
3425 debug_generic_expr (rhs2_type);
3426 return true;
3427 }
3428
3429 return false;
3430 }
3431
3432 case LSHIFT_EXPR:
3433 case RSHIFT_EXPR:
3434 case LROTATE_EXPR:
3435 case RROTATE_EXPR:
3436 {
3437 /* Shifts and rotates are ok on integral types, fixed point
3438 types and integer vector types. */
3439 if ((!INTEGRAL_TYPE_P (rhs1_type)
3440 && !FIXED_POINT_TYPE_P (rhs1_type)
3441 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3442 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3443 || (!INTEGRAL_TYPE_P (rhs2_type)
3444 /* Vector shifts of vectors are also ok. */
3445 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3446 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3447 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3448 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3449 || !useless_type_conversion_p (lhs_type, rhs1_type))
3450 {
3451 error ("type mismatch in shift expression");
3452 debug_generic_expr (lhs_type);
3453 debug_generic_expr (rhs1_type);
3454 debug_generic_expr (rhs2_type);
3455 return true;
3456 }
3457
3458 return false;
3459 }
3460
3461 case VEC_LSHIFT_EXPR:
3462 case VEC_RSHIFT_EXPR:
3463 {
3464 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3465 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3466 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3467 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3468 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3469 || (!INTEGRAL_TYPE_P (rhs2_type)
3470 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3471 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3472 || !useless_type_conversion_p (lhs_type, rhs1_type))
3473 {
3474 error ("type mismatch in vector shift expression");
3475 debug_generic_expr (lhs_type);
3476 debug_generic_expr (rhs1_type);
3477 debug_generic_expr (rhs2_type);
3478 return true;
3479 }
3480 /* For shifting a vector of non-integral components we
3481 only allow shifting by a constant multiple of the element size. */
3482 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3483 && (TREE_CODE (rhs2) != INTEGER_CST
3484 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3485 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3486 {
3487 error ("non-element sized vector shift of floating point vector");
3488 return true;
3489 }
3490
3491 return false;
3492 }
3493
3494 case WIDEN_LSHIFT_EXPR:
3495 {
3496 if (!INTEGRAL_TYPE_P (lhs_type)
3497 || !INTEGRAL_TYPE_P (rhs1_type)
3498 || TREE_CODE (rhs2) != INTEGER_CST
3499 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3500 {
3501 error ("type mismatch in widening vector shift expression");
3502 debug_generic_expr (lhs_type);
3503 debug_generic_expr (rhs1_type);
3504 debug_generic_expr (rhs2_type);
3505 return true;
3506 }
3507
3508 return false;
3509 }
3510
3511 case VEC_WIDEN_LSHIFT_HI_EXPR:
3512 case VEC_WIDEN_LSHIFT_LO_EXPR:
3513 {
3514 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3515 || TREE_CODE (lhs_type) != VECTOR_TYPE
3516 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3517 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3518 || TREE_CODE (rhs2) != INTEGER_CST
3519 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3520 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3521 {
3522 error ("type mismatch in widening vector shift expression");
3523 debug_generic_expr (lhs_type);
3524 debug_generic_expr (rhs1_type);
3525 debug_generic_expr (rhs2_type);
3526 return true;
3527 }
3528
3529 return false;
3530 }
3531
3532 case PLUS_EXPR:
3533 case MINUS_EXPR:
3534 {
3535 tree lhs_etype = lhs_type;
3536 tree rhs1_etype = rhs1_type;
3537 tree rhs2_etype = rhs2_type;
3538 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3539 {
3540 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3541 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3542 {
3543 error ("invalid non-vector operands to vector valued plus");
3544 return true;
3545 }
3546 lhs_etype = TREE_TYPE (lhs_type);
3547 rhs1_etype = TREE_TYPE (rhs1_type);
3548 rhs2_etype = TREE_TYPE (rhs2_type);
3549 }
3550 if (POINTER_TYPE_P (lhs_etype)
3551 || POINTER_TYPE_P (rhs1_etype)
3552 || POINTER_TYPE_P (rhs2_etype))
3553 {
3554 error ("invalid (pointer) operands to plus/minus");
3555 return true;
3556 }
3557
3558 /* Continue with generic binary expression handling. */
3559 break;
3560 }
3561
3562 case POINTER_PLUS_EXPR:
3563 {
3564 if (!POINTER_TYPE_P (rhs1_type)
3565 || !useless_type_conversion_p (lhs_type, rhs1_type)
3566 || !ptrofftype_p (rhs2_type))
3567 {
3568 error ("type mismatch in pointer plus expression");
3569 debug_generic_stmt (lhs_type);
3570 debug_generic_stmt (rhs1_type);
3571 debug_generic_stmt (rhs2_type);
3572 return true;
3573 }
3574
3575 return false;
3576 }
3577
3578 case TRUTH_ANDIF_EXPR:
3579 case TRUTH_ORIF_EXPR:
3580 case TRUTH_AND_EXPR:
3581 case TRUTH_OR_EXPR:
3582 case TRUTH_XOR_EXPR:
3583
3584 gcc_unreachable ();
3585
3586 case LT_EXPR:
3587 case LE_EXPR:
3588 case GT_EXPR:
3589 case GE_EXPR:
3590 case EQ_EXPR:
3591 case NE_EXPR:
3592 case UNORDERED_EXPR:
3593 case ORDERED_EXPR:
3594 case UNLT_EXPR:
3595 case UNLE_EXPR:
3596 case UNGT_EXPR:
3597 case UNGE_EXPR:
3598 case UNEQ_EXPR:
3599 case LTGT_EXPR:
3600 /* Comparisons are also binary, but the result type is not
3601 connected to the operand types. */
3602 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3603
3604 case WIDEN_MULT_EXPR:
3605 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3606 return true;
3607 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3608 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3609
3610 case WIDEN_SUM_EXPR:
3611 case VEC_WIDEN_MULT_HI_EXPR:
3612 case VEC_WIDEN_MULT_LO_EXPR:
3613 case VEC_WIDEN_MULT_EVEN_EXPR:
3614 case VEC_WIDEN_MULT_ODD_EXPR:
3615 case VEC_PACK_TRUNC_EXPR:
3616 case VEC_PACK_SAT_EXPR:
3617 case VEC_PACK_FIX_TRUNC_EXPR:
3618 /* FIXME. */
3619 return false;
3620
3621 case MULT_EXPR:
3622 case MULT_HIGHPART_EXPR:
3623 case TRUNC_DIV_EXPR:
3624 case CEIL_DIV_EXPR:
3625 case FLOOR_DIV_EXPR:
3626 case ROUND_DIV_EXPR:
3627 case TRUNC_MOD_EXPR:
3628 case CEIL_MOD_EXPR:
3629 case FLOOR_MOD_EXPR:
3630 case ROUND_MOD_EXPR:
3631 case RDIV_EXPR:
3632 case EXACT_DIV_EXPR:
3633 case MIN_EXPR:
3634 case MAX_EXPR:
3635 case BIT_IOR_EXPR:
3636 case BIT_XOR_EXPR:
3637 case BIT_AND_EXPR:
3638 /* Continue with generic binary expression handling. */
3639 break;
3640
3641 default:
3642 gcc_unreachable ();
3643 }
3644
3645 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3646 || !useless_type_conversion_p (lhs_type, rhs2_type))
3647 {
3648 error ("type mismatch in binary expression");
3649 debug_generic_stmt (lhs_type);
3650 debug_generic_stmt (rhs1_type);
3651 debug_generic_stmt (rhs2_type);
3652 return true;
3653 }
3654
3655 return false;
3656 }
3657
3658 /* Verify a gimple assignment statement STMT with a ternary rhs.
3659 Returns true if anything is wrong. */
3660
3661 static bool
3662 verify_gimple_assign_ternary (gimple stmt)
3663 {
3664 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3665 tree lhs = gimple_assign_lhs (stmt);
3666 tree lhs_type = TREE_TYPE (lhs);
3667 tree rhs1 = gimple_assign_rhs1 (stmt);
3668 tree rhs1_type = TREE_TYPE (rhs1);
3669 tree rhs2 = gimple_assign_rhs2 (stmt);
3670 tree rhs2_type = TREE_TYPE (rhs2);
3671 tree rhs3 = gimple_assign_rhs3 (stmt);
3672 tree rhs3_type = TREE_TYPE (rhs3);
3673
3674 if (!is_gimple_reg (lhs))
3675 {
3676 error ("non-register as LHS of ternary operation");
3677 return true;
3678 }
3679
3680 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3681 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3682 || !is_gimple_val (rhs2)
3683 || !is_gimple_val (rhs3))
3684 {
3685 error ("invalid operands in ternary operation");
3686 return true;
3687 }
3688
3689 /* First handle operations that involve different types. */
3690 switch (rhs_code)
3691 {
3692 case WIDEN_MULT_PLUS_EXPR:
3693 case WIDEN_MULT_MINUS_EXPR:
3694 if ((!INTEGRAL_TYPE_P (rhs1_type)
3695 && !FIXED_POINT_TYPE_P (rhs1_type))
3696 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3697 || !useless_type_conversion_p (lhs_type, rhs3_type)
3698 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3699 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3700 {
3701 error ("type mismatch in widening multiply-accumulate expression");
3702 debug_generic_expr (lhs_type);
3703 debug_generic_expr (rhs1_type);
3704 debug_generic_expr (rhs2_type);
3705 debug_generic_expr (rhs3_type);
3706 return true;
3707 }
3708 break;
3709
3710 case FMA_EXPR:
3711 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3712 || !useless_type_conversion_p (lhs_type, rhs2_type)
3713 || !useless_type_conversion_p (lhs_type, rhs3_type))
3714 {
3715 error ("type mismatch in fused multiply-add expression");
3716 debug_generic_expr (lhs_type);
3717 debug_generic_expr (rhs1_type);
3718 debug_generic_expr (rhs2_type);
3719 debug_generic_expr (rhs3_type);
3720 return true;
3721 }
3722 break;
3723
3724 case COND_EXPR:
3725 case VEC_COND_EXPR:
3726 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3727 || !useless_type_conversion_p (lhs_type, rhs3_type))
3728 {
3729 error ("type mismatch in conditional expression");
3730 debug_generic_expr (lhs_type);
3731 debug_generic_expr (rhs2_type);
3732 debug_generic_expr (rhs3_type);
3733 return true;
3734 }
3735 break;
3736
3737 case VEC_PERM_EXPR:
3738 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3739 || !useless_type_conversion_p (lhs_type, rhs2_type))
3740 {
3741 error ("type mismatch in vector permute expression");
3742 debug_generic_expr (lhs_type);
3743 debug_generic_expr (rhs1_type);
3744 debug_generic_expr (rhs2_type);
3745 debug_generic_expr (rhs3_type);
3746 return true;
3747 }
3748
3749 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3750 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3751 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3752 {
3753 error ("vector types expected in vector permute expression");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3756 debug_generic_expr (rhs2_type);
3757 debug_generic_expr (rhs3_type);
3758 return true;
3759 }
3760
3761 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3762 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3763 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3764 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3765 != TYPE_VECTOR_SUBPARTS (lhs_type))
3766 {
3767 error ("vectors with different element number found "
3768 "in vector permute expression");
3769 debug_generic_expr (lhs_type);
3770 debug_generic_expr (rhs1_type);
3771 debug_generic_expr (rhs2_type);
3772 debug_generic_expr (rhs3_type);
3773 return true;
3774 }
3775
3776 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
3777 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
3778 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
3779 {
3780 error ("invalid mask type in vector permute expression");
3781 debug_generic_expr (lhs_type);
3782 debug_generic_expr (rhs1_type);
3783 debug_generic_expr (rhs2_type);
3784 debug_generic_expr (rhs3_type);
3785 return true;
3786 }
3787
3788 return false;
3789
3790 case DOT_PROD_EXPR:
3791 case REALIGN_LOAD_EXPR:
3792 /* FIXME. */
3793 return false;
3794
3795 default:
3796 gcc_unreachable ();
3797 }
3798 return false;
3799 }
3800
3801 /* Verify a gimple assignment statement STMT with a single rhs.
3802 Returns true if anything is wrong. */
3803
3804 static bool
3805 verify_gimple_assign_single (gimple stmt)
3806 {
3807 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3808 tree lhs = gimple_assign_lhs (stmt);
3809 tree lhs_type = TREE_TYPE (lhs);
3810 tree rhs1 = gimple_assign_rhs1 (stmt);
3811 tree rhs1_type = TREE_TYPE (rhs1);
3812 bool res = false;
3813
3814 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3815 {
3816 error ("non-trivial conversion at assignment");
3817 debug_generic_expr (lhs_type);
3818 debug_generic_expr (rhs1_type);
3819 return true;
3820 }
3821
3822 if (gimple_clobber_p (stmt)
3823 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
3824 {
3825 error ("non-decl/MEM_REF LHS in clobber statement");
3826 debug_generic_expr (lhs);
3827 return true;
3828 }
3829
3830 if (handled_component_p (lhs))
3831 res |= verify_types_in_gimple_reference (lhs, true);
3832
3833 /* Special codes we cannot handle via their class. */
3834 switch (rhs_code)
3835 {
3836 case ADDR_EXPR:
3837 {
3838 tree op = TREE_OPERAND (rhs1, 0);
3839 if (!is_gimple_addressable (op))
3840 {
3841 error ("invalid operand in unary expression");
3842 return true;
3843 }
3844
3845 /* Technically there is no longer a need for matching types, but
3846 gimple hygiene asks for this check. In LTO we can end up
3847 combining incompatible units and thus end up with addresses
3848 of globals that change their type to a common one. */
3849 if (!in_lto_p
3850 && !types_compatible_p (TREE_TYPE (op),
3851 TREE_TYPE (TREE_TYPE (rhs1)))
3852 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
3853 TREE_TYPE (op)))
3854 {
3855 error ("type mismatch in address expression");
3856 debug_generic_stmt (TREE_TYPE (rhs1));
3857 debug_generic_stmt (TREE_TYPE (op));
3858 return true;
3859 }
3860
3861 return verify_types_in_gimple_reference (op, true);
3862 }
3863
3864 /* tcc_reference */
3865 case INDIRECT_REF:
3866 error ("INDIRECT_REF in gimple IL");
3867 return true;
3868
3869 case COMPONENT_REF:
3870 case BIT_FIELD_REF:
3871 case ARRAY_REF:
3872 case ARRAY_RANGE_REF:
3873 case VIEW_CONVERT_EXPR:
3874 case REALPART_EXPR:
3875 case IMAGPART_EXPR:
3876 case TARGET_MEM_REF:
3877 case MEM_REF:
3878 if (!is_gimple_reg (lhs)
3879 && is_gimple_reg_type (TREE_TYPE (lhs)))
3880 {
3881 error ("invalid rhs for gimple memory store");
3882 debug_generic_stmt (lhs);
3883 debug_generic_stmt (rhs1);
3884 return true;
3885 }
3886 return res || verify_types_in_gimple_reference (rhs1, false);
3887
3888 /* tcc_constant */
3889 case SSA_NAME:
3890 case INTEGER_CST:
3891 case REAL_CST:
3892 case FIXED_CST:
3893 case COMPLEX_CST:
3894 case VECTOR_CST:
3895 case STRING_CST:
3896 return res;
3897
3898 /* tcc_declaration */
3899 case CONST_DECL:
3900 return res;
3901 case VAR_DECL:
3902 case PARM_DECL:
3903 if (!is_gimple_reg (lhs)
3904 && !is_gimple_reg (rhs1)
3905 && is_gimple_reg_type (TREE_TYPE (lhs)))
3906 {
3907 error ("invalid rhs for gimple memory store");
3908 debug_generic_stmt (lhs);
3909 debug_generic_stmt (rhs1);
3910 return true;
3911 }
3912 return res;
3913
3914 case CONSTRUCTOR:
3915 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
3916 {
3917 unsigned int i;
3918 tree elt_i, elt_v, elt_t = NULL_TREE;
3919
3920 if (CONSTRUCTOR_NELTS (rhs1) == 0)
3921 return res;
3922 	    /* For vector CONSTRUCTORs we require that either it is an empty
3923 	       CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
3924 	       (then the element count must be correct to cover the whole
3925 	       outer vector and the index must be NULL on all elements), or
3926 	       it is a CONSTRUCTOR of scalar elements, where as an exception
3927 	       we allow a smaller number of elements (assuming zero filling)
3928 	       and consecutive indexes as compared to NULL indexes (such
3929 	       CONSTRUCTORs can appear in the IL from FEs).  */
3930 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
3931 {
3932 if (elt_t == NULL_TREE)
3933 {
3934 elt_t = TREE_TYPE (elt_v);
3935 if (TREE_CODE (elt_t) == VECTOR_TYPE)
3936 {
3938 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3939 TREE_TYPE (elt_t)))
3940 {
3941 error ("incorrect type of vector CONSTRUCTOR"
3942 " elements");
3943 debug_generic_stmt (rhs1);
3944 return true;
3945 }
3946 else if (CONSTRUCTOR_NELTS (rhs1)
3947 * TYPE_VECTOR_SUBPARTS (elt_t)
3948 != TYPE_VECTOR_SUBPARTS (rhs1_type))
3949 {
3950 error ("incorrect number of vector CONSTRUCTOR"
3951 " elements");
3952 debug_generic_stmt (rhs1);
3953 return true;
3954 }
3955 }
3956 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
3957 elt_t))
3958 {
3959 error ("incorrect type of vector CONSTRUCTOR elements");
3960 debug_generic_stmt (rhs1);
3961 return true;
3962 }
3963 else if (CONSTRUCTOR_NELTS (rhs1)
3964 > TYPE_VECTOR_SUBPARTS (rhs1_type))
3965 {
3966 error ("incorrect number of vector CONSTRUCTOR elements");
3967 debug_generic_stmt (rhs1);
3968 return true;
3969 }
3970 }
3971 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
3972 {
3973 error ("incorrect type of vector CONSTRUCTOR elements");
3974 debug_generic_stmt (rhs1);
3975 return true;
3976 }
3977 if (elt_i != NULL_TREE
3978 && (TREE_CODE (elt_t) == VECTOR_TYPE
3979 || TREE_CODE (elt_i) != INTEGER_CST
3980 || compare_tree_int (elt_i, i) != 0))
3981 {
3982 error ("vector CONSTRUCTOR with non-NULL element index");
3983 debug_generic_stmt (rhs1);
3984 return true;
3985 }
3986 }
3987 }
3988 return res;
3989 case OBJ_TYPE_REF:
3990 case ASSERT_EXPR:
3991 case WITH_SIZE_EXPR:
3992 /* FIXME. */
3993 return res;
3994
3995 default:;
3996 }
3997
3998 return res;
3999 }
4000
4001 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4002 is a problem, otherwise false. */
4003
4004 static bool
4005 verify_gimple_assign (gimple stmt)
4006 {
4007 switch (gimple_assign_rhs_class (stmt))
4008 {
4009 case GIMPLE_SINGLE_RHS:
4010 return verify_gimple_assign_single (stmt);
4011
4012 case GIMPLE_UNARY_RHS:
4013 return verify_gimple_assign_unary (stmt);
4014
4015 case GIMPLE_BINARY_RHS:
4016 return verify_gimple_assign_binary (stmt);
4017
4018 case GIMPLE_TERNARY_RHS:
4019 return verify_gimple_assign_ternary (stmt);
4020
4021 default:
4022 gcc_unreachable ();
4023 }
4024 }
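
/* As an illustration (a sketch, not an exhaustive list), the dispatch
   above groups assignments by the shape of their right-hand side:

       a = b;              <-- GIMPLE_SINGLE_RHS
       a = -b;             <-- GIMPLE_UNARY_RHS
       a = b + c;          <-- GIMPLE_BINARY_RHS
       a = b ? c : d;      <-- GIMPLE_TERNARY_RHS

   The authoritative classification is the one computed by
   gimple_assign_rhs_class on the statement's rhs code.  */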
4025
4026 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4027 is a problem, otherwise false. */
4028
4029 static bool
4030 verify_gimple_return (gimple stmt)
4031 {
4032 tree op = gimple_return_retval (stmt);
4033 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4034
4035 /* We cannot test for present return values as we do not fix up missing
4036 return values from the original source. */
4037 if (op == NULL)
4038 return false;
4039
4040 if (!is_gimple_val (op)
4041 && TREE_CODE (op) != RESULT_DECL)
4042 {
4043 error ("invalid operand in return statement");
4044 debug_generic_stmt (op);
4045 return true;
4046 }
4047
4048 if ((TREE_CODE (op) == RESULT_DECL
4049 && DECL_BY_REFERENCE (op))
4050 || (TREE_CODE (op) == SSA_NAME
4051 && SSA_NAME_VAR (op)
4052 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4053 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4054 op = TREE_TYPE (op);
4055
4056 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4057 {
4058 error ("invalid conversion in return statement");
4059 debug_generic_stmt (restype);
4060 debug_generic_stmt (TREE_TYPE (op));
4061 return true;
4062 }
4063
4064 return false;
4065 }
4066
4067
4068 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4069 is a problem, otherwise false. */
4070
4071 static bool
4072 verify_gimple_goto (gimple stmt)
4073 {
4074 tree dest = gimple_goto_dest (stmt);
4075
4076 /* ??? We have two canonical forms of direct goto destinations, a
4077 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4078 if (TREE_CODE (dest) != LABEL_DECL
4079 && (!is_gimple_val (dest)
4080 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4081 {
4082 error ("goto destination is neither a label nor a pointer");
4083 return true;
4084 }
4085
4086 return false;
4087 }
4088
4089 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4090 is a problem, otherwise false. */
4091
4092 static bool
4093 verify_gimple_switch (gimple stmt)
4094 {
4095 unsigned int i, n;
4096 tree elt, prev_upper_bound = NULL_TREE;
4097 tree index_type, elt_type = NULL_TREE;
4098
4099 if (!is_gimple_val (gimple_switch_index (stmt)))
4100 {
4101 error ("invalid operand to switch statement");
4102 debug_generic_stmt (gimple_switch_index (stmt));
4103 return true;
4104 }
4105
4106 index_type = TREE_TYPE (gimple_switch_index (stmt));
4107 if (! INTEGRAL_TYPE_P (index_type))
4108 {
4109 error ("non-integral type switch statement");
4110 debug_generic_expr (index_type);
4111 return true;
4112 }
4113
4114 elt = gimple_switch_label (stmt, 0);
4115 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4116 {
4117 error ("invalid default case label in switch statement");
4118 debug_generic_expr (elt);
4119 return true;
4120 }
4121
4122 n = gimple_switch_num_labels (stmt);
4123 for (i = 1; i < n; i++)
4124 {
4125 elt = gimple_switch_label (stmt, i);
4126
4127 if (! CASE_LOW (elt))
4128 {
4129 error ("invalid case label in switch statement");
4130 debug_generic_expr (elt);
4131 return true;
4132 }
4133 if (CASE_HIGH (elt)
4134 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4135 {
4136 error ("invalid case range in switch statement");
4137 debug_generic_expr (elt);
4138 return true;
4139 }
4140
4141 if (elt_type)
4142 {
4143 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4144 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4145 {
4146 error ("type mismatch for case label in switch statement");
4147 debug_generic_expr (elt);
4148 return true;
4149 }
4150 }
4151 else
4152 {
4153 elt_type = TREE_TYPE (CASE_LOW (elt));
4154 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4155 {
4156 error ("type precision mismatch in switch statement");
4157 return true;
4158 }
4159 }
4160
4161 if (prev_upper_bound)
4162 {
4163 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4164 {
4165 error ("case labels not sorted in switch statement");
4166 return true;
4167 }
4168 }
4169
4170 prev_upper_bound = CASE_HIGH (elt);
4171 if (! prev_upper_bound)
4172 prev_upper_bound = CASE_LOW (elt);
4173 }
4174
4175 return false;
4176 }
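
/* For example, a case vector that passes the checks above looks
   roughly like

       switch (i_1) <default: L0, case 1: L1, case 5 ... 7: L2>

   where the default label comes first with NULL CASE_LOW/CASE_HIGH,
   and the remaining labels are sorted by CASE_LOW with
   non-overlapping ranges.  (Illustrative dump syntax only.)  */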
4177
4178 /* Verify a gimple debug statement STMT.
4179 Returns true if anything is wrong. */
4180
4181 static bool
4182 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4183 {
4184 /* There isn't much that could be wrong in a gimple debug stmt. A
4185 gimple debug bind stmt, for example, maps a tree (usually
4186 a VAR_DECL or a PARM_DECL, but possibly a scalarized
4187 component or member of an aggregate type) to another tree that
4188 can be an arbitrary expression. These stmts expand into debug
4189 insns, and are converted to debug notes by var-tracking.c. */
4190 return false;
4191 }
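
/* In the IL dumps a debug bind stmt appears roughly as

       # DEBUG x => y_1 + 1

   binding the user variable x to an arbitrary value expression.
   (Illustrative dump syntax only.)  */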
4192
4193 /* Verify a gimple label statement STMT.
4194 Returns true if anything is wrong. */
4195
4196 static bool
4197 verify_gimple_label (gimple stmt)
4198 {
4199 tree decl = gimple_label_label (stmt);
4200 int uid;
4201 bool err = false;
4202
4203 if (TREE_CODE (decl) != LABEL_DECL)
4204 return true;
4205 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4206 && DECL_CONTEXT (decl) != current_function_decl)
4207 {
4208 error ("label's context is not the current function decl");
4209 err |= true;
4210 }
4211
4212 uid = LABEL_DECL_UID (decl);
4213 if (cfun->cfg
4214 && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
4215 {
4216 error ("incorrect entry in label_to_block_map");
4217 err |= true;
4218 }
4219
4220 uid = EH_LANDING_PAD_NR (decl);
4221 if (uid)
4222 {
4223 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4224 if (decl != lp->post_landing_pad)
4225 {
4226 error ("incorrect setting of landing pad number");
4227 err |= true;
4228 }
4229 }
4230
4231 return err;
4232 }
4233
4234 /* Verify the GIMPLE statement STMT. Returns true if there is an
4235 error, otherwise false. */
4236
4237 static bool
4238 verify_gimple_stmt (gimple stmt)
4239 {
4240 switch (gimple_code (stmt))
4241 {
4242 case GIMPLE_ASSIGN:
4243 return verify_gimple_assign (stmt);
4244
4245 case GIMPLE_LABEL:
4246 return verify_gimple_label (stmt);
4247
4248 case GIMPLE_CALL:
4249 return verify_gimple_call (stmt);
4250
4251 case GIMPLE_COND:
4252 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4253 {
4254 error ("invalid comparison code in gimple cond");
4255 return true;
4256 }
4257 if (!(!gimple_cond_true_label (stmt)
4258 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4259 || !(!gimple_cond_false_label (stmt)
4260 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4261 {
4262 error ("invalid labels in gimple cond");
4263 return true;
4264 }
4265
4266 return verify_gimple_comparison (boolean_type_node,
4267 gimple_cond_lhs (stmt),
4268 gimple_cond_rhs (stmt));
4269
4270 case GIMPLE_GOTO:
4271 return verify_gimple_goto (stmt);
4272
4273 case GIMPLE_SWITCH:
4274 return verify_gimple_switch (stmt);
4275
4276 case GIMPLE_RETURN:
4277 return verify_gimple_return (stmt);
4278
4279 case GIMPLE_ASM:
4280 return false;
4281
4282 case GIMPLE_TRANSACTION:
4283 return verify_gimple_transaction (stmt);
4284
4285 /* Tuples that do not have tree operands. */
4286 case GIMPLE_NOP:
4287 case GIMPLE_PREDICT:
4288 case GIMPLE_RESX:
4289 case GIMPLE_EH_DISPATCH:
4290 case GIMPLE_EH_MUST_NOT_THROW:
4291 return false;
4292
4293 CASE_GIMPLE_OMP:
4294 /* OpenMP directives are validated by the FE and never operated
4295 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4296 non-gimple expressions when the main index variable has had
4297 its address taken. This does not affect the loop itself
4298 because the header of a GIMPLE_OMP_FOR is merely used to determine
4299 how to set up the parallel iteration. */
4300 return false;
4301
4302 case GIMPLE_DEBUG:
4303 return verify_gimple_debug (stmt);
4304
4305 default:
4306 gcc_unreachable ();
4307 }
4308 }
4309
4310 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4311 and false otherwise. */
4312
4313 static bool
4314 verify_gimple_phi (gimple phi)
4315 {
4316 bool err = false;
4317 unsigned i;
4318 tree phi_result = gimple_phi_result (phi);
4319 bool virtual_p;
4320
4321 if (!phi_result)
4322 {
4323 error ("invalid PHI result");
4324 return true;
4325 }
4326
4327 virtual_p = virtual_operand_p (phi_result);
4328 if (TREE_CODE (phi_result) != SSA_NAME
4329 || (virtual_p
4330 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4331 {
4332 error ("invalid PHI result");
4333 err = true;
4334 }
4335
4336 for (i = 0; i < gimple_phi_num_args (phi); i++)
4337 {
4338 tree t = gimple_phi_arg_def (phi, i);
4339
4340 if (!t)
4341 {
4342 error ("missing PHI def");
4343 err |= true;
4344 continue;
4345 }
4346 /* Addressable variables do have SSA_NAMEs but they
4347 are not considered gimple values. */
4348 else if ((TREE_CODE (t) == SSA_NAME
4349 && virtual_p != virtual_operand_p (t))
4350 || (virtual_p
4351 && (TREE_CODE (t) != SSA_NAME
4352 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4353 || (!virtual_p
4354 && !is_gimple_val (t)))
4355 {
4356 error ("invalid PHI argument");
4357 debug_generic_expr (t);
4358 err |= true;
4359 }
4360 #ifdef ENABLE_TYPES_CHECKING
4361 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4362 {
4363 error ("incompatible types in PHI argument %u", i);
4364 debug_generic_stmt (TREE_TYPE (phi_result));
4365 debug_generic_stmt (TREE_TYPE (t));
4366 err |= true;
4367 }
4368 #endif
4369 }
4370
4371 return err;
4372 }
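
/* For instance, with blocks 2 and 3 as predecessors, the checks above
   accept PHIs of the form

       x_3 = PHI <x_1(2), x_2(3)>            <-- scalar PHI
       .MEM_3 = PHI <.MEM_1(2), .MEM_2(3)>   <-- virtual PHI

   while mixing virtual and non-virtual operands in one PHI is
   rejected.  (Illustrative; exact dump syntax may vary.)  */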
4373
4374 /* Verify the GIMPLE statements inside the sequence STMTS. */
4375
4376 static bool
4377 verify_gimple_in_seq_2 (gimple_seq stmts)
4378 {
4379 gimple_stmt_iterator ittr;
4380 bool err = false;
4381
4382 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4383 {
4384 gimple stmt = gsi_stmt (ittr);
4385
4386 switch (gimple_code (stmt))
4387 {
4388 case GIMPLE_BIND:
4389 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4390 break;
4391
4392 case GIMPLE_TRY:
4393 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4394 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4395 break;
4396
4397 case GIMPLE_EH_FILTER:
4398 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4399 break;
4400
4401 case GIMPLE_EH_ELSE:
4402 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4403 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4404 break;
4405
4406 case GIMPLE_CATCH:
4407 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4408 break;
4409
4410 case GIMPLE_TRANSACTION:
4411 err |= verify_gimple_transaction (stmt);
4412 break;
4413
4414 default:
4415 {
4416 bool err2 = verify_gimple_stmt (stmt);
4417 if (err2)
4418 debug_gimple_stmt (stmt);
4419 err |= err2;
4420 }
4421 }
4422 }
4423
4424 return err;
4425 }
4426
4427 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4428 is a problem, otherwise false. */
4429
4430 static bool
4431 verify_gimple_transaction (gimple stmt)
4432 {
4433 tree lab = gimple_transaction_label (stmt);
4434 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4435 return true;
4436 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4437 }
4438
4439
4440 /* Verify the GIMPLE statements inside the statement list STMTS. */
4441
4442 DEBUG_FUNCTION void
4443 verify_gimple_in_seq (gimple_seq stmts)
4444 {
4445 timevar_push (TV_TREE_STMT_VERIFY);
4446 if (verify_gimple_in_seq_2 (stmts))
4447 internal_error ("verify_gimple failed");
4448 timevar_pop (TV_TREE_STMT_VERIFY);
4449 }
4450
4451 /* Return true when tree node T can be shared. */
4452
4453 static bool
4454 tree_node_can_be_shared (tree t)
4455 {
4456 if (IS_TYPE_OR_DECL_P (t)
4457 || is_gimple_min_invariant (t)
4458 || TREE_CODE (t) == SSA_NAME
4459 || t == error_mark_node
4460 || TREE_CODE (t) == IDENTIFIER_NODE)
4461 return true;
4462
4463 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4464 return true;
4465
4466 /* DECL_P nodes are already covered by the IS_TYPE_OR_DECL_P
4467 check above. */
4468
4469 return false;
4470 }
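
/* E.g. types, constants, decls and SSA names such as

       int, 42, x, x_1

   may be referenced from multiple statements, whereas expression
   nodes like the COMPONENT_REF for 'a.b' must be unshared so that
   each statement owns its operand trees.  */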
4471
4472 /* Called via walk_tree. Verify tree sharing. */
4473
4474 static tree
4475 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4476 {
4477 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4478
4479 if (tree_node_can_be_shared (*tp))
4480 {
4481 *walk_subtrees = false;
4482 return NULL;
4483 }
4484
4485 if (pointer_set_insert (visited, *tp))
4486 return *tp;
4487
4488 return NULL;
4489 }
4490
4491 /* Called via walk_gimple_stmt. Verify tree sharing. */
4492
4493 static tree
4494 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4495 {
4496 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4497 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4498 }
4499
4500 static bool eh_error_found;
4501 static int
4502 verify_eh_throw_stmt_node (void **slot, void *data)
4503 {
4504 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4505 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4506
4507 if (!pointer_set_contains (visited, node->stmt))
4508 {
4509 error ("dead STMT in EH table");
4510 debug_gimple_stmt (node->stmt);
4511 eh_error_found = true;
4512 }
4513 return 1;
4514 }
4515
4516 /* Verify that the block of location LOC is in BLOCKS. */
4517
4518 static bool
4519 verify_location (pointer_set_t *blocks, location_t loc)
4520 {
4521 tree block = LOCATION_BLOCK (loc);
4522 if (block != NULL_TREE
4523 && !pointer_set_contains (blocks, block))
4524 {
4525 error ("location references block not in block tree");
4526 return true;
4527 }
4528 if (block != NULL_TREE)
4529 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4530 return false;
4531 }
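
/* The recursion above follows the chain of block source locations,
   so for a location inside nested blocks, roughly

       LOC -> block B1, BLOCK_SOURCE_LOCATION (B1) -> block B2, ...

   every block on the chain must be contained in BLOCKS.
   (Sketch of the traversal only.)  */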
4532
4533 /* Called via walk_tree. Verify that expressions have no blocks. */
4534
4535 static tree
4536 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4537 {
4538 if (!EXPR_P (*tp))
4539 {
4540 *walk_subtrees = false;
4541 return NULL;
4542 }
4543
4544 location_t loc = EXPR_LOCATION (*tp);
4545 if (LOCATION_BLOCK (loc) != NULL)
4546 return *tp;
4547
4548 return NULL;
4549 }
4550
4551 /* Called via walk_tree. Verify locations of expressions. */
4552
4553 static tree
4554 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4555 {
4556 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4557
4558 if (TREE_CODE (*tp) == VAR_DECL
4559 && DECL_HAS_DEBUG_EXPR_P (*tp))
4560 {
4561 tree t = DECL_DEBUG_EXPR (*tp);
4562 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4563 if (addr)
4564 return addr;
4565 }
4566 if ((TREE_CODE (*tp) == VAR_DECL
4567 || TREE_CODE (*tp) == PARM_DECL
4568 || TREE_CODE (*tp) == RESULT_DECL)
4569 && DECL_HAS_VALUE_EXPR_P (*tp))
4570 {
4571 tree t = DECL_VALUE_EXPR (*tp);
4572 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4573 if (addr)
4574 return addr;
4575 }
4576
4577 if (!EXPR_P (*tp))
4578 {
4579 *walk_subtrees = false;
4580 return NULL;
4581 }
4582
4583 location_t loc = EXPR_LOCATION (*tp);
4584 if (verify_location (blocks, loc))
4585 return *tp;
4586
4587 return NULL;
4588 }
4589
4590 /* Called via walk_gimple_op. Verify locations of expressions. */
4591
4592 static tree
4593 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4594 {
4595 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4596 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4597 }
4598
4599 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4600
4601 static void
4602 collect_subblocks (pointer_set_t *blocks, tree block)
4603 {
4604 tree t;
4605 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4606 {
4607 pointer_set_insert (blocks, t);
4608 collect_subblocks (blocks, t);
4609 }
4610 }
4611
4612 /* Verify the GIMPLE statements in the CFG of FN. */
4613
4614 DEBUG_FUNCTION void
4615 verify_gimple_in_cfg (struct function *fn)
4616 {
4617 basic_block bb;
4618 bool err = false;
4619 struct pointer_set_t *visited, *visited_stmts, *blocks;
4620
4621 timevar_push (TV_TREE_STMT_VERIFY);
4622 visited = pointer_set_create ();
4623 visited_stmts = pointer_set_create ();
4624
4625 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4626 blocks = pointer_set_create ();
4627 if (DECL_INITIAL (fn->decl))
4628 {
4629 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4630 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4631 }
4632
4633 FOR_EACH_BB_FN (bb, fn)
4634 {
4635 gimple_stmt_iterator gsi;
4636
4637 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4638 {
4639 gimple phi = gsi_stmt (gsi);
4640 bool err2 = false;
4641 unsigned i;
4642
4643 pointer_set_insert (visited_stmts, phi);
4644
4645 if (gimple_bb (phi) != bb)
4646 {
4647 error ("gimple_bb (phi) is set to a wrong basic block");
4648 err2 = true;
4649 }
4650
4651 err2 |= verify_gimple_phi (phi);
4652
4653 /* Only PHI arguments have locations. */
4654 if (gimple_location (phi) != UNKNOWN_LOCATION)
4655 {
4656 error ("PHI node with location");
4657 err2 = true;
4658 }
4659
4660 for (i = 0; i < gimple_phi_num_args (phi); i++)
4661 {
4662 tree arg = gimple_phi_arg_def (phi, i);
4663 tree addr = walk_tree (&arg, verify_node_sharing_1,
4664 visited, NULL);
4665 if (addr)
4666 {
4667 error ("incorrect sharing of tree nodes");
4668 debug_generic_expr (addr);
4669 err2 |= true;
4670 }
4671 location_t loc = gimple_phi_arg_location (phi, i);
4672 if (virtual_operand_p (gimple_phi_result (phi))
4673 && loc != UNKNOWN_LOCATION)
4674 {
4675 error ("virtual PHI with argument locations");
4676 err2 = true;
4677 }
4678 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4679 if (addr)
4680 {
4681 debug_generic_expr (addr);
4682 err2 = true;
4683 }
4684 err2 |= verify_location (blocks, loc);
4685 }
4686
4687 if (err2)
4688 debug_gimple_stmt (phi);
4689 err |= err2;
4690 }
4691
4692 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4693 {
4694 gimple stmt = gsi_stmt (gsi);
4695 bool err2 = false;
4696 struct walk_stmt_info wi;
4697 tree addr;
4698 int lp_nr;
4699
4700 pointer_set_insert (visited_stmts, stmt);
4701
4702 if (gimple_bb (stmt) != bb)
4703 {
4704 error ("gimple_bb (stmt) is set to a wrong basic block");
4705 err2 = true;
4706 }
4707
4708 err2 |= verify_gimple_stmt (stmt);
4709 err2 |= verify_location (blocks, gimple_location (stmt));
4710
4711 memset (&wi, 0, sizeof (wi));
4712 wi.info = (void *) visited;
4713 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4714 if (addr)
4715 {
4716 error ("incorrect sharing of tree nodes");
4717 debug_generic_expr (addr);
4718 err2 |= true;
4719 }
4720
4721 memset (&wi, 0, sizeof (wi));
4722 wi.info = (void *) blocks;
4723 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4724 if (addr)
4725 {
4726 debug_generic_expr (addr);
4727 err2 |= true;
4728 }
4729
4730 /* ??? Instead of not checking these stmts at all the walker
4731 should know its context via wi. */
4732 if (!is_gimple_debug (stmt)
4733 && !is_gimple_omp (stmt))
4734 {
4735 memset (&wi, 0, sizeof (wi));
4736 addr = walk_gimple_op (stmt, verify_expr, &wi);
4737 if (addr)
4738 {
4739 debug_generic_expr (addr);
4740 inform (gimple_location (stmt), "in statement");
4741 err2 |= true;
4742 }
4743 }
4744
4745 /* If the statement is marked as part of an EH region, then it is
4746 expected that the statement could throw. Verify that when
4747 optimizations simplify a statement so that we can prove it
4748 cannot throw, the other data structures are updated to
4749 match. */
4750 lp_nr = lookup_stmt_eh_lp (stmt);
4751 if (lp_nr != 0)
4752 {
4753 if (!stmt_could_throw_p (stmt))
4754 {
4755 error ("statement marked for throw, but doesn%'t");
4756 err2 |= true;
4757 }
4758 else if (lp_nr > 0
4759 && !gsi_one_before_end_p (gsi)
4760 && stmt_can_throw_internal (stmt))
4761 {
4762 error ("statement marked for throw in middle of block");
4763 err2 |= true;
4764 }
4765 }
4766
4767 if (err2)
4768 debug_gimple_stmt (stmt);
4769 err |= err2;
4770 }
4771 }
4772
4773 eh_error_found = false;
4774 if (get_eh_throw_stmt_table (cfun))
4775 htab_traverse (get_eh_throw_stmt_table (cfun),
4776 verify_eh_throw_stmt_node,
4777 visited_stmts);
4778
4779 if (err || eh_error_found)
4780 internal_error ("verify_gimple failed");
4781
4782 pointer_set_destroy (visited);
4783 pointer_set_destroy (visited_stmts);
4784 pointer_set_destroy (blocks);
4785 verify_histograms ();
4786 timevar_pop (TV_TREE_STMT_VERIFY);
4787 }
4788
4789
4790 /* Verifies that the flow information is OK. */
4791
4792 static int
4793 gimple_verify_flow_info (void)
4794 {
4795 int err = 0;
4796 basic_block bb;
4797 gimple_stmt_iterator gsi;
4798 gimple stmt;
4799 edge e;
4800 edge_iterator ei;
4801
4802 if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
4803 {
4804 error ("ENTRY_BLOCK has IL associated with it");
4805 err = 1;
4806 }
4807
4808 if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
4809 {
4810 error ("EXIT_BLOCK has IL associated with it");
4811 err = 1;
4812 }
4813
4814 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
4815 if (e->flags & EDGE_FALLTHRU)
4816 {
4817 error ("fallthru to exit from bb %d", e->src->index);
4818 err = 1;
4819 }
4820
4821 FOR_EACH_BB (bb)
4822 {
4823 bool found_ctrl_stmt = false;
4824
4825 stmt = NULL;
4826
4827 /* Skip labels on the start of basic block. */
4828 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4829 {
4830 tree label;
4831 gimple prev_stmt = stmt;
4832
4833 stmt = gsi_stmt (gsi);
4834
4835 if (gimple_code (stmt) != GIMPLE_LABEL)
4836 break;
4837
4838 label = gimple_label_label (stmt);
4839 if (prev_stmt && DECL_NONLOCAL (label))
4840 {
4841 error ("nonlocal label ");
4842 print_generic_expr (stderr, label, 0);
4843 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4844 bb->index);
4845 err = 1;
4846 }
4847
4848 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
4849 {
4850 error ("EH landing pad label ");
4851 print_generic_expr (stderr, label, 0);
4852 fprintf (stderr, " is not first in a sequence of labels in bb %d",
4853 bb->index);
4854 err = 1;
4855 }
4856
4857 if (label_to_block (label) != bb)
4858 {
4859 error ("label ");
4860 print_generic_expr (stderr, label, 0);
4861 fprintf (stderr, " to block does not match in bb %d",
4862 bb->index);
4863 err = 1;
4864 }
4865
4866 if (decl_function_context (label) != current_function_decl)
4867 {
4868 error ("label ");
4869 print_generic_expr (stderr, label, 0);
4870 fprintf (stderr, " has incorrect context in bb %d",
4871 bb->index);
4872 err = 1;
4873 }
4874 }
4875
4876 /* Verify that body of basic block BB is free of control flow. */
4877 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4878 {
4879 gimple stmt = gsi_stmt (gsi);
4880
4881 if (found_ctrl_stmt)
4882 {
4883 error ("control flow in the middle of basic block %d",
4884 bb->index);
4885 err = 1;
4886 }
4887
4888 if (stmt_ends_bb_p (stmt))
4889 found_ctrl_stmt = true;
4890
4891 if (gimple_code (stmt) == GIMPLE_LABEL)
4892 {
4893 error ("label ");
4894 print_generic_expr (stderr, gimple_label_label (stmt), 0);
4895 fprintf (stderr, " in the middle of basic block %d", bb->index);
4896 err = 1;
4897 }
4898 }
4899
4900 gsi = gsi_last_bb (bb);
4901 if (gsi_end_p (gsi))
4902 continue;
4903
4904 stmt = gsi_stmt (gsi);
4905
4906 if (gimple_code (stmt) == GIMPLE_LABEL)
4907 continue;
4908
4909 err |= verify_eh_edges (stmt);
4910
4911 if (is_ctrl_stmt (stmt))
4912 {
4913 FOR_EACH_EDGE (e, ei, bb->succs)
4914 if (e->flags & EDGE_FALLTHRU)
4915 {
4916 error ("fallthru edge after a control statement in bb %d",
4917 bb->index);
4918 err = 1;
4919 }
4920 }
4921
4922 if (gimple_code (stmt) != GIMPLE_COND)
4923 {
4924 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
4925 after anything other than a GIMPLE_COND. */
4926 FOR_EACH_EDGE (e, ei, bb->succs)
4927 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
4928 {
4929 error ("true/false edge after a non-GIMPLE_COND in bb %d",
4930 bb->index);
4931 err = 1;
4932 }
4933 }
4934
4935 switch (gimple_code (stmt))
4936 {
4937 case GIMPLE_COND:
4938 {
4939 edge true_edge;
4940 edge false_edge;
4941
4942 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
4943
4944 if (!true_edge
4945 || !false_edge
4946 || !(true_edge->flags & EDGE_TRUE_VALUE)
4947 || !(false_edge->flags & EDGE_FALSE_VALUE)
4948 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4949 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
4950 || EDGE_COUNT (bb->succs) >= 3)
4951 {
4952 error ("wrong outgoing edge flags at end of bb %d",
4953 bb->index);
4954 err = 1;
4955 }
4956 }
4957 break;
4958
4959 case GIMPLE_GOTO:
4960 if (simple_goto_p (stmt))
4961 {
4962 error ("explicit goto at end of bb %d", bb->index);
4963 err = 1;
4964 }
4965 else
4966 {
4967 /* FIXME. We should double check that the labels in the
4968 destination blocks have their address taken. */
4969 FOR_EACH_EDGE (e, ei, bb->succs)
4970 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
4971 | EDGE_FALSE_VALUE))
4972 || !(e->flags & EDGE_ABNORMAL))
4973 {
4974 error ("wrong outgoing edge flags at end of bb %d",
4975 bb->index);
4976 err = 1;
4977 }
4978 }
4979 break;
4980
4981 case GIMPLE_CALL:
4982 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
4983 break;
4984 /* ... fallthru ... */
4985 case GIMPLE_RETURN:
4986 if (!single_succ_p (bb)
4987 || (single_succ_edge (bb)->flags
4988 & (EDGE_FALLTHRU | EDGE_ABNORMAL
4989 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
4990 {
4991 error ("wrong outgoing edge flags at end of bb %d", bb->index);
4992 err = 1;
4993 }
4994 if (single_succ (bb) != EXIT_BLOCK_PTR)
4995 {
4996 error ("return edge does not point to exit in bb %d",
4997 bb->index);
4998 err = 1;
4999 }
5000 break;
5001
5002 case GIMPLE_SWITCH:
5003 {
5004 tree prev;
5005 edge e;
5006 size_t i, n;
5007
5008 n = gimple_switch_num_labels (stmt);
5009
5010 /* Mark all the destination basic blocks. */
5011 for (i = 0; i < n; ++i)
5012 {
5013 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5014 basic_block label_bb = label_to_block (lab);
5015 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5016 label_bb->aux = (void *)1;
5017 }
5018
5019 /* Verify that the case labels are sorted. */
5020 prev = gimple_switch_label (stmt, 0);
5021 for (i = 1; i < n; ++i)
5022 {
5023 tree c = gimple_switch_label (stmt, i);
5024 if (!CASE_LOW (c))
5025 {
5026 error ("found default case not at the start of "
5027 "case vector");
5028 err = 1;
5029 continue;
5030 }
5031 if (CASE_LOW (prev)
5032 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5033 {
5034 error ("case labels not sorted: ");
5035 print_generic_expr (stderr, prev, 0);
5036 fprintf (stderr," is greater than ");
5037 print_generic_expr (stderr, c, 0);
5038 fprintf (stderr," but comes before it.\n");
5039 err = 1;
5040 }
5041 prev = c;
5042 }
5043 /* VRP will remove the default case if it can prove it will
5044 never be executed. So do not verify there always exists
5045 a default case here. */
5046
5047 FOR_EACH_EDGE (e, ei, bb->succs)
5048 {
5049 if (!e->dest->aux)
5050 {
5051 error ("extra outgoing edge %d->%d",
5052 bb->index, e->dest->index);
5053 err = 1;
5054 }
5055
5056 e->dest->aux = (void *)2;
5057 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5058 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5059 {
5060 error ("wrong outgoing edge flags at end of bb %d",
5061 bb->index);
5062 err = 1;
5063 }
5064 }
5065
5066 /* Check that we have all of them. */
5067 for (i = 0; i < n; ++i)
5068 {
5069 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5070 basic_block label_bb = label_to_block (lab);
5071
5072 if (label_bb->aux != (void *)2)
5073 {
5074 error ("missing edge %i->%i", bb->index, label_bb->index);
5075 err = 1;
5076 }
5077 }
5078
5079 FOR_EACH_EDGE (e, ei, bb->succs)
5080 e->dest->aux = (void *)0;
5081 }
5082 break;
5083
5084 case GIMPLE_EH_DISPATCH:
5085 err |= verify_eh_dispatch_edge (stmt);
5086 break;
5087
5088 default:
5089 break;
5090 }
5091 }
5092
5093 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5094 verify_dominators (CDI_DOMINATORS);
5095
5096 return err;
5097 }
5098
5099
5100 /* Updates phi nodes after creating a forwarder block joined
5101 by edge FALLTHRU. */
5102
5103 static void
5104 gimple_make_forwarder_block (edge fallthru)
5105 {
5106 edge e;
5107 edge_iterator ei;
5108 basic_block dummy, bb;
5109 tree var;
5110 gimple_stmt_iterator gsi;
5111
5112 dummy = fallthru->src;
5113 bb = fallthru->dest;
5114
5115 if (single_pred_p (bb))
5116 return;
5117
5118 /* If we redirected a branch we must create new PHI nodes at the
5119 start of BB. */
5120 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5121 {
5122 gimple phi, new_phi;
5123
5124 phi = gsi_stmt (gsi);
5125 var = gimple_phi_result (phi);
5126 new_phi = create_phi_node (var, bb);
5127 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5128 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5129 UNKNOWN_LOCATION);
5130 }
5131
5132 /* Add the arguments we have stored on edges. */
5133 FOR_EACH_EDGE (e, ei, bb->preds)
5134 {
5135 if (e == fallthru)
5136 continue;
5137
5138 flush_pending_stmts (e);
5139 }
5140 }
5141
5142
5143 /* Return a non-special label in the head of basic block BB.
5144 Create one if it doesn't exist. */
5145
5146 tree
5147 gimple_block_label (basic_block bb)
5148 {
5149 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5150 bool first = true;
5151 tree label;
5152 gimple stmt;
5153
5154 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5155 {
5156 stmt = gsi_stmt (i);
5157 if (gimple_code (stmt) != GIMPLE_LABEL)
5158 break;
5159 label = gimple_label_label (stmt);
5160 if (!DECL_NONLOCAL (label))
5161 {
5162 if (!first)
5163 gsi_move_before (&i, &s);
5164 return label;
5165 }
5166 }
5167
5168 label = create_artificial_label (UNKNOWN_LOCATION);
5169 stmt = gimple_build_label (label);
5170 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5171 return label;
5172 }
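
/* A typical use is materializing a jump target when redirecting an
   edge, e.g. (sketch, where CASE_ELT is a hypothetical case label
   element):

       tree label = gimple_block_label (dest);
       CASE_LABEL (case_elt) = label;

   which is how the GIMPLE_SWITCH case of
   gimple_redirect_edge_and_branch below retargets case labels.  */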
5173
5174
5175 /* Attempt to perform edge redirection by replacing a possibly complex
5176 jump instruction by a goto or by removing the jump completely.
5177 This can apply only if all edges now point to the same block. The
5178 parameters and return values are equivalent to
5179 redirect_edge_and_branch. */
5180
5181 static edge
5182 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5183 {
5184 basic_block src = e->src;
5185 gimple_stmt_iterator i;
5186 gimple stmt;
5187
5188 /* We can replace or remove a complex jump only when we have exactly
5189 two edges. */
5190 if (EDGE_COUNT (src->succs) != 2
5191 /* Verify that all targets will be TARGET. Specifically, the
5192 edge that is not E must also go to TARGET. */
5193 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5194 return NULL;
5195
5196 i = gsi_last_bb (src);
5197 if (gsi_end_p (i))
5198 return NULL;
5199
5200 stmt = gsi_stmt (i);
5201
5202 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5203 {
5204 gsi_remove (&i, true);
5205 e = ssa_redirect_edge (e, target);
5206 e->flags = EDGE_FALLTHRU;
5207 return e;
5208 }
5209
5210 return NULL;
5211 }
5212
5213
5214 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5215 edge representing the redirected branch. */
5216
5217 static edge
5218 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5219 {
5220 basic_block bb = e->src;
5221 gimple_stmt_iterator gsi;
5222 edge ret;
5223 gimple stmt;
5224
5225 if (e->flags & EDGE_ABNORMAL)
5226 return NULL;
5227
5228 if (e->dest == dest)
5229 return NULL;
5230
5231 if (e->flags & EDGE_EH)
5232 return redirect_eh_edge (e, dest);
5233
5234 if (e->src != ENTRY_BLOCK_PTR)
5235 {
5236 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5237 if (ret)
5238 return ret;
5239 }
5240
5241 gsi = gsi_last_bb (bb);
5242 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5243
5244 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5245 {
5246 case GIMPLE_COND:
5247 /* For COND_EXPR, we only need to redirect the edge. */
5248 break;
5249
5250 case GIMPLE_GOTO:
5251 /* No non-abnormal edges should lead from a non-simple goto, and
5252 simple ones should be represented implicitly. */
5253 gcc_unreachable ();
5254
5255 case GIMPLE_SWITCH:
5256 {
5257 tree label = gimple_block_label (dest);
5258 tree cases = get_cases_for_edge (e, stmt);
5259
5260 /* If we have a list of cases associated with E, then use it
5261 as it's a lot faster than walking the entire case vector. */
5262 if (cases)
5263 {
5264 edge e2 = find_edge (e->src, dest);
5265 tree last, first;
5266
5267 first = cases;
5268 while (cases)
5269 {
5270 last = cases;
5271 CASE_LABEL (cases) = label;
5272 cases = CASE_CHAIN (cases);
5273 }
5274
5275 /* If there was already an edge in the CFG, then we need
5276 to move all the cases associated with E to E2. */
5277 if (e2)
5278 {
5279 tree cases2 = get_cases_for_edge (e2, stmt);
5280
5281 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5282 CASE_CHAIN (cases2) = first;
5283 }
5284 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5285 }
5286 else
5287 {
5288 size_t i, n = gimple_switch_num_labels (stmt);
5289
5290 for (i = 0; i < n; i++)
5291 {
5292 tree elt = gimple_switch_label (stmt, i);
5293 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5294 CASE_LABEL (elt) = label;
5295 }
5296 }
5297 }
5298 break;
5299
5300 case GIMPLE_ASM:
5301 {
5302 int i, n = gimple_asm_nlabels (stmt);
5303 tree label = NULL;
5304
5305 for (i = 0; i < n; ++i)
5306 {
5307 tree cons = gimple_asm_label_op (stmt, i);
5308 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5309 {
5310 if (!label)
5311 label = gimple_block_label (dest);
5312 TREE_VALUE (cons) = label;
5313 }
5314 }
5315
5316 /* If we didn't find any label matching the former edge in the
5317 asm labels, we must be redirecting the fallthrough
5318 edge. */
5319 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5320 }
5321 break;
5322
5323 case GIMPLE_RETURN:
5324 gsi_remove (&gsi, true);
5325 e->flags |= EDGE_FALLTHRU;
5326 break;
5327
5328 case GIMPLE_OMP_RETURN:
5329 case GIMPLE_OMP_CONTINUE:
5330 case GIMPLE_OMP_SECTIONS_SWITCH:
5331 case GIMPLE_OMP_FOR:
5332 /* The edges from OMP constructs can be simply redirected. */
5333 break;
5334
5335 case GIMPLE_EH_DISPATCH:
5336 if (!(e->flags & EDGE_FALLTHRU))
5337 redirect_eh_dispatch_edge (stmt, e, dest);
5338 break;
5339
5340 case GIMPLE_TRANSACTION:
5341 /* The ABORT edge has a stored label associated with it, otherwise
5342 the edges are simply redirectable. */
5343 if (e->flags == 0)
5344 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5345 break;
5346
5347 default:
5348 /* Otherwise it must be a fallthru edge, and we don't need to
5349 do anything besides redirecting it. */
5350 gcc_assert (e->flags & EDGE_FALLTHRU);
5351 break;
5352 }
5353
5354 /* Update/insert PHI nodes as necessary. */
5355
5356 /* Now update the edges in the CFG. */
5357 e = ssa_redirect_edge (e, dest);
5358
5359 return e;
5360 }
5361
5362 /* Returns true if it is possible to remove edge E by redirecting
5363 it to the destination of the other edge from E->src. */
5364
5365 static bool
5366 gimple_can_remove_branch_p (const_edge e)
5367 {
5368 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5369 return false;
5370
5371 return true;
5372 }
5373
5374 /* Simple wrapper, as we can always redirect fallthru edges. */
5375
5376 static basic_block
5377 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5378 {
5379 e = gimple_redirect_edge_and_branch (e, dest);
5380 gcc_assert (e);
5381
5382 return NULL;
5383 }
5384
5385
5386 /* Splits basic block BB after statement STMT (but at least after the
5387 labels). If STMT is NULL, BB is split just after the labels. */
5388
5389 static basic_block
5390 gimple_split_block (basic_block bb, void *stmt)
5391 {
5392 gimple_stmt_iterator gsi;
5393 gimple_stmt_iterator gsi_tgt;
5394 gimple act;
5395 gimple_seq list;
5396 basic_block new_bb;
5397 edge e;
5398 edge_iterator ei;
5399
5400 new_bb = create_empty_bb (bb);
5401
5402 /* Redirect the outgoing edges. */
5403 new_bb->succs = bb->succs;
5404 bb->succs = NULL;
5405 FOR_EACH_EDGE (e, ei, new_bb->succs)
5406 e->src = new_bb;
5407
5408 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5409 stmt = NULL;
5410
5411 /* Move everything from GSI to the new basic block. */
5412 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5413 {
5414 act = gsi_stmt (gsi);
5415 if (gimple_code (act) == GIMPLE_LABEL)
5416 continue;
5417
5418 if (!stmt)
5419 break;
5420
5421 if (stmt == act)
5422 {
5423 gsi_next (&gsi);
5424 break;
5425 }
5426 }
5427
5428 if (gsi_end_p (gsi))
5429 return new_bb;
5430
5431 /* Split the statement list - avoid re-creating new containers as this
5432 brings ugly quadratic memory consumption in the inliner.
5433 (We are still quadratic since we need to update stmt BB pointers,
5434 sadly.) */
5435 gsi_split_seq_before (&gsi, &list);
5436 set_bb_seq (new_bb, list);
5437 for (gsi_tgt = gsi_start (list);
5438 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5439 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5440
5441 return new_bb;
5442 }
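
/* Callers normally go through the generic split_block hook, e.g.
   (sketch):

       edge e = split_block (bb, split_point);
       basic_block rest = e->dest;

   where E is the single edge from BB to the new block holding the
   statements after SPLIT_POINT; see
   gimple_split_block_before_cond_jump below for an in-file use.  */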
5443
5444
5445 /* Moves basic block BB after block AFTER. */
5446
5447 static bool
5448 gimple_move_block_after (basic_block bb, basic_block after)
5449 {
5450 if (bb->prev_bb == after)
5451 return true;
5452
5453 unlink_block (bb);
5454 link_block (bb, after);
5455
5456 return true;
5457 }
5458
5459
5460 /* Return TRUE if block BB has no executable statements, otherwise return
5461 FALSE. */
5462
5463 static bool
5464 gimple_empty_block_p (basic_block bb)
5465 {
5466 /* BB must have no executable statements. */
5467 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5468 if (phi_nodes (bb))
5469 return false;
5470 if (gsi_end_p (gsi))
5471 return true;
5472 if (is_gimple_debug (gsi_stmt (gsi)))
5473 gsi_next_nondebug (&gsi);
5474 return gsi_end_p (gsi);
5475 }
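
/* For instance, a block containing only

       <L5>:
       # DEBUG x => 1

   is considered empty here, since labels and debug stmts are not
   executable.  (Illustrative dump syntax only.)  */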
5476
5477
5478 /* Split a basic block if it ends with a conditional branch and if the
5479 other part of the block is not empty. */
5480
5481 static basic_block
5482 gimple_split_block_before_cond_jump (basic_block bb)
5483 {
5484 gimple last, split_point;
5485 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5486 if (gsi_end_p (gsi))
5487 return NULL;
5488 last = gsi_stmt (gsi);
5489 if (gimple_code (last) != GIMPLE_COND
5490 && gimple_code (last) != GIMPLE_SWITCH)
5491 return NULL;
5492 gsi_prev_nondebug (&gsi);
5493 split_point = gsi_stmt (gsi);
5494 return split_block (bb, split_point)->dest;
5495 }
5496
5497
5498 /* Return true if a basic block can be duplicated. */
5499
5500 static bool
5501 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5502 {
5503 return true;
5504 }
5505
5506 /* Create a duplicate of the basic block BB. NOTE: This does not
5507 preserve SSA form. */
5508
5509 static basic_block
5510 gimple_duplicate_bb (basic_block bb)
5511 {
5512 basic_block new_bb;
5513 gimple_stmt_iterator gsi, gsi_tgt;
5514 gimple_seq phis = phi_nodes (bb);
5515 gimple phi, stmt, copy;
5516
5517 new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
5518
5519 /* Copy the PHI nodes. We ignore PHI node arguments here because
5520 the incoming edges have not been set up yet. */
5521 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5522 {
5523 phi = gsi_stmt (gsi);
5524 copy = create_phi_node (NULL_TREE, new_bb);
5525 create_new_def_for (gimple_phi_result (phi), copy,
5526 gimple_phi_result_ptr (copy));
5527 gimple_set_uid (copy, gimple_uid (phi));
5528 }
5529
5530 gsi_tgt = gsi_start_bb (new_bb);
5531 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5532 {
5533 def_operand_p def_p;
5534 ssa_op_iter op_iter;
5535 tree lhs;
5536
5537 stmt = gsi_stmt (gsi);
5538 if (gimple_code (stmt) == GIMPLE_LABEL)
5539 continue;
5540
5541 /* Don't duplicate label debug stmts. */
5542 if (gimple_debug_bind_p (stmt)
5543 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5544 == LABEL_DECL)
5545 continue;
5546
5547 /* Create a new copy of STMT and duplicate STMT's virtual
5548 operands. */
5549 copy = gimple_copy (stmt);
5550 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5551
5552 maybe_duplicate_eh_stmt (copy, stmt);
5553 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5554
5555 /* When copying around a stmt writing into a local non-user
5556 aggregate, make sure it won't share a stack slot with other
5557 vars. */
5558 lhs = gimple_get_lhs (stmt);
5559 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5560 {
5561 tree base = get_base_address (lhs);
5562 if (base
5563 && (TREE_CODE (base) == VAR_DECL
5564 || TREE_CODE (base) == RESULT_DECL)
5565 && DECL_IGNORED_P (base)
5566 && !TREE_STATIC (base)
5567 && !DECL_EXTERNAL (base)
5568 && (TREE_CODE (base) != VAR_DECL
5569 || !DECL_HAS_VALUE_EXPR_P (base)))
5570 DECL_NONSHAREABLE (base) = 1;
5571 }
5572
5573 /* Create new names for all the definitions created by COPY and
5574 add replacement mappings for each new name. */
5575 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5576 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5577 }
5578
5579 return new_bb;
5580 }
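
/* Note the PHI copies above have results but no arguments yet; a
   caller is expected to wire up the copied edges first and then call
   e.g.

       add_phi_args_after_copy_bb (new_bb);

   (defined below) to fill in the PHI arguments.  */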
5581
5582 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5583
5584 static void
5585 add_phi_args_after_copy_edge (edge e_copy)
5586 {
5587 basic_block bb, bb_copy = e_copy->src, dest;
5588 edge e;
5589 edge_iterator ei;
5590 gimple phi, phi_copy;
5591 tree def;
5592 gimple_stmt_iterator psi, psi_copy;
5593
5594 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5595 return;
5596
5597 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5598
5599 if (e_copy->dest->flags & BB_DUPLICATED)
5600 dest = get_bb_original (e_copy->dest);
5601 else
5602 dest = e_copy->dest;
5603
5604 e = find_edge (bb, dest);
5605 if (!e)
5606 {
5607 /* During loop unrolling the target of the latch edge is copied.
5608 In this case we are not looking for the edge to DEST, but for
5609 the edge to the duplicated block whose original was DEST. */
5610 FOR_EACH_EDGE (e, ei, bb->succs)
5611 {
5612 if ((e->dest->flags & BB_DUPLICATED)
5613 && get_bb_original (e->dest) == dest)
5614 break;
5615 }
5616
5617 gcc_assert (e != NULL);
5618 }
5619
5620 for (psi = gsi_start_phis (e->dest),
5621 psi_copy = gsi_start_phis (e_copy->dest);
5622 !gsi_end_p (psi);
5623 gsi_next (&psi), gsi_next (&psi_copy))
5624 {
5625 phi = gsi_stmt (psi);
5626 phi_copy = gsi_stmt (psi_copy);
5627 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5628 add_phi_arg (phi_copy, def, e_copy,
5629 gimple_phi_arg_location_from_edge (phi, e));
5630 }
5631 }
5632
5633
5634 /* Basic block BB_COPY was created by code duplication. Add phi node
5635 arguments for edges going out of BB_COPY. The blocks that were
5636 duplicated have BB_DUPLICATED set. */
5637
5638 void
5639 add_phi_args_after_copy_bb (basic_block bb_copy)
5640 {
5641 edge e_copy;
5642 edge_iterator ei;
5643
5644 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5645 {
5646 add_phi_args_after_copy_edge (e_copy);
5647 }
5648 }
5649
5650 /* Blocks in REGION_COPY array of length N_REGION were created by
5651 duplication of basic blocks. Add phi node arguments for edges
5652 going from these blocks. If E_COPY is not NULL, also add
5653 phi node arguments for its destination. */
5654
5655 void
5656 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5657 edge e_copy)
5658 {
5659 unsigned i;
5660
5661 for (i = 0; i < n_region; i++)
5662 region_copy[i]->flags |= BB_DUPLICATED;
5663
5664 for (i = 0; i < n_region; i++)
5665 add_phi_args_after_copy_bb (region_copy[i]);
5666 if (e_copy)
5667 add_phi_args_after_copy_edge (e_copy);
5668
5669 for (i = 0; i < n_region; i++)
5670 region_copy[i]->flags &= ~BB_DUPLICATED;
5671 }
5672
5673 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5674 important exit edge EXIT. By important we mean that no SSA name defined
5675 inside the region is live over the other exit edges of the region. All entry
5676 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5677 to the duplicate of the region. Dominance and loop information is
5678 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5679 UPDATE_DOMINANCE is false then we assume that the caller will update the
5680 dominance information after calling this function. The new basic
5681 blocks are stored to REGION_COPY in the same order as they appear in REGION,
5682 provided that REGION_COPY is not NULL.
5683 The function returns false if it is unable to copy the region,
5684 true otherwise. */
5685
5686 bool
5687 gimple_duplicate_sese_region (edge entry, edge exit,
5688 basic_block *region, unsigned n_region,
5689 basic_block *region_copy,
5690 bool update_dominance)
5691 {
5692 unsigned i;
5693 bool free_region_copy = false, copying_header = false;
5694 struct loop *loop = entry->dest->loop_father;
5695 edge exit_copy;
5696 vec<basic_block> doms;
5697 edge redirected;
5698 int total_freq = 0, entry_freq = 0;
5699 gcov_type total_count = 0, entry_count = 0;
5700
5701 if (!can_copy_bbs_p (region, n_region))
5702 return false;
5703
5704 /* Some sanity checking. Note that we do not check for all possible
5705 misuses of the functions. I.e. if you ask to copy something weird,
5706 it will work, but the state of structures probably will not be
5707 correct. */
5708 for (i = 0; i < n_region; i++)
5709 {
5710 /* We do not handle subloops, i.e. all the blocks must belong to the
5711 same loop. */
5712 if (region[i]->loop_father != loop)
5713 return false;
5714
5715 if (region[i] != entry->dest
5716 && region[i] == loop->header)
5717 return false;
5718 }
5719
5720 set_loop_copy (loop, loop);
5721
5722 /* In case the function is used for loop header copying (which is the primary
5723 use), ensure that EXIT and its copy will be the new latch and entry edges. */
5724 if (loop->header == entry->dest)
5725 {
5726 copying_header = true;
5727 set_loop_copy (loop, loop_outer (loop));
5728
5729 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5730 return false;
5731
5732 for (i = 0; i < n_region; i++)
5733 if (region[i] != exit->src
5734 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5735 return false;
5736 }
5737
5738 if (!region_copy)
5739 {
5740 region_copy = XNEWVEC (basic_block, n_region);
5741 free_region_copy = true;
5742 }
5743
5744 initialize_original_copy_tables ();
5745
5746 /* Record blocks outside the region that are dominated by something
5747 inside. */
5748 if (update_dominance)
5749 {
5750 doms.create (0);
5751 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5752 }
5753
5754 if (entry->dest->count)
5755 {
5756 total_count = entry->dest->count;
5757 entry_count = entry->count;
5758 /* Fix up corner cases, to avoid division by zero or creation of negative
5759 frequencies. */
5760 if (entry_count > total_count)
5761 entry_count = total_count;
5762 }
5763 else
5764 {
5765 total_freq = entry->dest->frequency;
5766 entry_freq = EDGE_FREQUENCY (entry);
5767 /* Fix up corner cases, to avoid division by zero or creation of negative
5768 frequencies. */
5769 if (total_freq == 0)
5770 total_freq = 1;
5771 else if (entry_freq > total_freq)
5772 entry_freq = total_freq;
5773 }
5774
5775 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
5776 split_edge_bb_loc (entry), update_dominance);
5777 if (total_count)
5778 {
5779 scale_bbs_frequencies_gcov_type (region, n_region,
5780 total_count - entry_count,
5781 total_count);
5782 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
5783 total_count);
5784 }
5785 else
5786 {
5787 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
5788 total_freq);
5789 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
5790 }
5791
5792 if (copying_header)
5793 {
5794 loop->header = exit->dest;
5795 loop->latch = exit->src;
5796 }
5797
5798 /* Redirect the entry and add the phi node arguments. */
5799 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
5800 gcc_assert (redirected != NULL);
5801 flush_pending_stmts (entry);
5802
5803 /* Concerning updating of dominators: We must recount dominators
5804 for entry block and its copy. Anything that is outside of the
5805 region, but was dominated by something inside needs recounting as
5806 well. */
5807 if (update_dominance)
5808 {
5809 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
5810 doms.safe_push (get_bb_original (entry->dest));
5811 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
5812 doms.release ();
5813 }
5814
5815 /* Add the other PHI node arguments. */
5816 add_phi_args_after_copy (region_copy, n_region, NULL);
5817
5818 if (free_region_copy)
5819 free (region_copy);
5820
5821 free_original_copy_tables ();
5822 return true;
5823 }
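
/* As an illustration of the primary use (loop header copying), the
   header region is duplicated so that, roughly,

       while (cond)               if (cond)
         body;            -->       do
                                      body;
                                    while (cond);

   holds at source level; the actual transform of course operates on
   the CFG and the GIMPLE IL, not on source constructs.  */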
5824
5825 /* Checks if BB is part of the region defined by N_REGION BBS. */
5826 static bool
5827 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5828 {
5829 unsigned int n;
5830
5831 for (n = 0; n < n_region; n++)
5832 {
5833 if (bb == bbs[n])
5834 return true;
5835 }
5836 return false;
5837 }
5838
5839 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5840 are stored to REGION_COPY in the same order as they appear
5841 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5842 the region, EXIT an exit from it. The condition guarding EXIT
5843 is moved to ENTRY. Returns true if duplication succeeds, false
5844 otherwise.
5845
5846 For example,
5847
5848 some_code;
5849 if (cond)
5850 A;
5851 else
5852 B;
5853
5854 is transformed to
5855
5856 if (cond)
5857 {
5858 some_code;
5859 A;
5860 }
5861 else
5862 {
5863 some_code;
5864 B;
5865 }
5866 */
5867
5868 bool
5869 gimple_duplicate_sese_tail (edge entry, edge exit,
5870 basic_block *region, unsigned n_region,
5871 basic_block *region_copy)
5872 {
5873 unsigned i;
5874 bool free_region_copy = false;
5875 struct loop *loop = exit->dest->loop_father;
5876 struct loop *orig_loop = entry->dest->loop_father;
5877 basic_block switch_bb, entry_bb, nentry_bb;
5878 vec<basic_block> doms;
5879 int total_freq = 0, exit_freq = 0;
5880 gcov_type total_count = 0, exit_count = 0;
5881 edge exits[2], nexits[2], e;
5882 gimple_stmt_iterator gsi;
5883 gimple cond_stmt;
5884 edge sorig, snew;
5885 basic_block exit_bb;
5886 gimple_stmt_iterator psi;
5887 gimple phi;
5888 tree def;
5889 struct loop *target, *aloop, *cloop;
5890
5891 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5892 exits[0] = exit;
5893 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5894
5895 if (!can_copy_bbs_p (region, n_region))
5896 return false;
5897
5898 initialize_original_copy_tables ();
5899 set_loop_copy (orig_loop, loop);
5900
5901 target = loop;
5902 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5903 {
5904 if (bb_part_of_region_p (aloop->header, region, n_region))
5905 {
5906 cloop = duplicate_loop (aloop, target);
5907 duplicate_subloops (aloop, cloop);
5908 }
5909 }
5910
5911 if (!region_copy)
5912 {
5913 region_copy = XNEWVEC (basic_block, n_region);
5914 free_region_copy = true;
5915 }
5916
5917 gcc_assert (!need_ssa_update_p (cfun));
5918
5919 /* Record blocks outside the region that are dominated by something
5920 inside. */
5921 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5922
5923 if (exit->src->count)
5924 {
5925 total_count = exit->src->count;
5926 exit_count = exit->count;
5927 /* Fix up corner cases, to avoid division by zero or creation of negative
5928 frequencies. */
5929 if (exit_count > total_count)
5930 exit_count = total_count;
5931 }
5932 else
5933 {
5934 total_freq = exit->src->frequency;
5935 exit_freq = EDGE_FREQUENCY (exit);
5936 /* Fix up corner cases, to avoid division by zero or creation of negative
5937 frequencies. */
5938 if (total_freq == 0)
5939 total_freq = 1;
5940 if (exit_freq > total_freq)
5941 exit_freq = total_freq;
5942 }
5943
5944 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5945 split_edge_bb_loc (exit), true);
5946 if (total_count)
5947 {
5948 scale_bbs_frequencies_gcov_type (region, n_region,
5949 total_count - exit_count,
5950 total_count);
5951 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5952 total_count);
5953 }
5954 else
5955 {
5956 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5957 total_freq);
5958 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5959 }
5960
5961 /* Create the switch block, and put the exit condition into it. */
5962 entry_bb = entry->dest;
5963 nentry_bb = get_bb_copy (entry_bb);
5964 if (!last_stmt (entry->src)
5965 || !stmt_ends_bb_p (last_stmt (entry->src)))
5966 switch_bb = entry->src;
5967 else
5968 switch_bb = split_edge (entry);
5969 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5970
5971 gsi = gsi_last_bb (switch_bb);
5972 cond_stmt = last_stmt (exit->src);
5973 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5974 cond_stmt = gimple_copy (cond_stmt);
5975
5976 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5977
5978 sorig = single_succ_edge (switch_bb);
5979 sorig->flags = exits[1]->flags;
5980 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5981
5982 /* Register the new edge from SWITCH_BB in loop exit lists. */
5983 rescan_loop_exit (snew, true, false);
5984
5985 /* Add the PHI node arguments. */
5986 add_phi_args_after_copy (region_copy, n_region, snew);
5987
5988 /* Get rid of now superfluous conditions and associated edges (and phi node
5989 arguments). */
5990 exit_bb = exit->dest;
5991
5992 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
5993 PENDING_STMT (e) = NULL;
5994
5995 /* The latch of ORIG_LOOP was copied, and so was the backedge
5996 to the original header. We redirect this backedge to EXIT_BB. */
5997 for (i = 0; i < n_region; i++)
5998 if (get_bb_original (region_copy[i]) == orig_loop->latch)
5999 {
6000 gcc_assert (single_succ_edge (region_copy[i]));
6001 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6002 PENDING_STMT (e) = NULL;
6003 for (psi = gsi_start_phis (exit_bb);
6004 !gsi_end_p (psi);
6005 gsi_next (&psi))
6006 {
6007 phi = gsi_stmt (psi);
6008 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6009 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6010 }
6011 }
6012 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6013 PENDING_STMT (e) = NULL;
6014
6015 /* Anything that is outside of the region, but was dominated by something
6016 inside, needs its dominance info updated. */
6017 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6018 doms.release ();
6019 /* Update the SSA web. */
6020 update_ssa (TODO_update_ssa);
6021
6022 if (free_region_copy)
6023 free (region_copy);
6024
6025 free_original_copy_tables ();
6026 return true;
6027 }
6028
6029 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6030 adding blocks when the dominator traversal reaches EXIT. This
6031 function silently assumes that ENTRY strictly dominates EXIT. */
6032
6033 void
6034 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6035 vec<basic_block> *bbs_p)
6036 {
6037 basic_block son;
6038
6039 for (son = first_dom_son (CDI_DOMINATORS, entry);
6040 son;
6041 son = next_dom_son (CDI_DOMINATORS, son))
6042 {
6043 bbs_p->safe_push (son);
6044 if (son != exit)
6045 gather_blocks_in_sese_region (son, exit, bbs_p);
6046 }
6047 }
6048
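/* A minimal usage sketch (illustrative only): a caller that wants the
   blocks of an SESE region must push ENTRY itself, since the traversal
   only collects dominated sons:

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
     ... use bbs ...
     bbs.release ();

   This mirrors what move_sese_region_to_fn below does.  */
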
6049 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6050 The duplicates are recorded in VARS_MAP. */
6051
6052 static void
6053 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6054 tree to_context)
6055 {
6056 tree t = *tp, new_t;
6057 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6058 void **loc;
6059
6060 if (DECL_CONTEXT (t) == to_context)
6061 return;
6062
6063 loc = pointer_map_contains (vars_map, t);
6064
6065 if (!loc)
6066 {
6067 loc = pointer_map_insert (vars_map, t);
6068
6069 if (SSA_VAR_P (t))
6070 {
6071 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6072 add_local_decl (f, new_t);
6073 }
6074 else
6075 {
6076 gcc_assert (TREE_CODE (t) == CONST_DECL);
6077 new_t = copy_node (t);
6078 }
6079 DECL_CONTEXT (new_t) = to_context;
6080
6081 *loc = new_t;
6082 }
6083 else
6084 new_t = (tree) *loc;
6085
6086 *tp = new_t;
6087 }
6088
6089
6090 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6091 VARS_MAP maps old ssa names and var_decls to the new ones. */
6092
6093 static tree
6094 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6095 tree to_context)
6096 {
6097 void **loc;
6098 tree new_name;
6099
6100 gcc_assert (!virtual_operand_p (name));
6101
6102 loc = pointer_map_contains (vars_map, name);
6103
6104 if (!loc)
6105 {
6106 tree decl = SSA_NAME_VAR (name);
6107 if (decl)
6108 {
6109 replace_by_duplicate_decl (&decl, vars_map, to_context);
6110 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6111 decl, SSA_NAME_DEF_STMT (name));
6112 if (SSA_NAME_IS_DEFAULT_DEF (name))
6113 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6114 decl, new_name);
6115 }
6116 else
6117 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6118 name, SSA_NAME_DEF_STMT (name));
6119
6120 loc = pointer_map_insert (vars_map, name);
6121 *loc = new_name;
6122 }
6123 else
6124 new_name = (tree) *loc;
6125
6126 return new_name;
6127 }
6128
6129 struct move_stmt_d
6130 {
6131 tree orig_block;
6132 tree new_block;
6133 tree from_context;
6134 tree to_context;
6135 struct pointer_map_t *vars_map;
6136 htab_t new_label_map;
6137 struct pointer_map_t *eh_map;
6138 bool remap_decls_p;
6139 };
6140
6141 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6142 contained in *TP if it was ORIG_BLOCK previously, and change the
6143 DECL_CONTEXT of every local variable referenced in *TP. */
6144
6145 static tree
6146 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6147 {
6148 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6149 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6150 tree t = *tp;
6151
6152 if (EXPR_P (t))
6153 {
6154 tree block = TREE_BLOCK (t);
6155 if (block == p->orig_block
6156 || (p->orig_block == NULL_TREE
6157 && block != NULL_TREE))
6158 TREE_SET_BLOCK (t, p->new_block);
6159 #ifdef ENABLE_CHECKING
6160 else if (block != NULL_TREE)
6161 {
6162 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6163 block = BLOCK_SUPERCONTEXT (block);
6164 gcc_assert (block == p->orig_block);
6165 }
6166 #endif
6167 }
6168 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6169 {
6170 if (TREE_CODE (t) == SSA_NAME)
6171 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6172 else if (TREE_CODE (t) == LABEL_DECL)
6173 {
6174 if (p->new_label_map)
6175 {
6176 struct tree_map in, *out;
6177 in.base.from = t;
6178 out = (struct tree_map *)
6179 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6180 if (out)
6181 *tp = t = out->to;
6182 }
6183
6184 DECL_CONTEXT (t) = p->to_context;
6185 }
6186 else if (p->remap_decls_p)
6187 {
6188 /* Replace T with its duplicate. T should no longer appear in the
6189 parent function, so this looks wasteful; however, it may appear
6190 in referenced_vars, and more importantly, as virtual operands of
6191 statements, and in alias lists of other variables. It would be
6192 quite difficult to expunge it from all those places. ??? It might
6193 suffice to do this for addressable variables. */
6194 if ((TREE_CODE (t) == VAR_DECL
6195 && !is_global_var (t))
6196 || TREE_CODE (t) == CONST_DECL)
6197 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6198 }
6199 *walk_subtrees = 0;
6200 }
6201 else if (TYPE_P (t))
6202 *walk_subtrees = 0;
6203
6204 return NULL_TREE;
6205 }
6206
6207 /* Helper for move_stmt_r. Given an EH region number for the source
6208 function, map that to the duplicate EH region number in the dest. */
6209
6210 static int
6211 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6212 {
6213 eh_region old_r, new_r;
6214 void **slot;
6215
6216 old_r = get_eh_region_from_number (old_nr);
6217 slot = pointer_map_contains (p->eh_map, old_r);
6218 new_r = (eh_region) *slot;
6219
6220 return new_r->index;
6221 }
6222
6223 /* Similar, but operate on INTEGER_CSTs. */
6224
6225 static tree
6226 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6227 {
6228 int old_nr, new_nr;
6229
6230 old_nr = tree_low_cst (old_t_nr, 0);
6231 new_nr = move_stmt_eh_region_nr (old_nr, p);
6232
6233 return build_int_cst (integer_type_node, new_nr);
6234 }
6235
6236 /* Like move_stmt_op, but for gimple statements.
6237
6238 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6239 contained in the current statement in *GSI_P and change the
6240 DECL_CONTEXT of every local variable referenced in the current
6241 statement. */
6242
6243 static tree
6244 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6245 struct walk_stmt_info *wi)
6246 {
6247 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6248 gimple stmt = gsi_stmt (*gsi_p);
6249 tree block = gimple_block (stmt);
6250
6251 if (block == p->orig_block
6252 || (p->orig_block == NULL_TREE
6253 && block != NULL_TREE))
6254 gimple_set_block (stmt, p->new_block);
6255
6256 switch (gimple_code (stmt))
6257 {
6258 case GIMPLE_CALL:
6259 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6260 {
6261 tree r, fndecl = gimple_call_fndecl (stmt);
6262 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6263 switch (DECL_FUNCTION_CODE (fndecl))
6264 {
6265 case BUILT_IN_EH_COPY_VALUES:
6266 r = gimple_call_arg (stmt, 1);
6267 r = move_stmt_eh_region_tree_nr (r, p);
6268 gimple_call_set_arg (stmt, 1, r);
6269 /* FALLTHRU */
6270
6271 case BUILT_IN_EH_POINTER:
6272 case BUILT_IN_EH_FILTER:
6273 r = gimple_call_arg (stmt, 0);
6274 r = move_stmt_eh_region_tree_nr (r, p);
6275 gimple_call_set_arg (stmt, 0, r);
6276 break;
6277
6278 default:
6279 break;
6280 }
6281 }
6282 break;
6283
6284 case GIMPLE_RESX:
6285 {
6286 int r = gimple_resx_region (stmt);
6287 r = move_stmt_eh_region_nr (r, p);
6288 gimple_resx_set_region (stmt, r);
6289 }
6290 break;
6291
6292 case GIMPLE_EH_DISPATCH:
6293 {
6294 int r = gimple_eh_dispatch_region (stmt);
6295 r = move_stmt_eh_region_nr (r, p);
6296 gimple_eh_dispatch_set_region (stmt, r);
6297 }
6298 break;
6299
6300 case GIMPLE_OMP_RETURN:
6301 case GIMPLE_OMP_CONTINUE:
6302 break;
6303 default:
6304 if (is_gimple_omp (stmt))
6305 {
6306 /* Do not remap variables inside OMP directives. Variables
6307 referenced in clauses and directive header belong to the
6308 parent function and should not be moved into the child
6309 function. */
6310 bool save_remap_decls_p = p->remap_decls_p;
6311 p->remap_decls_p = false;
6312 *handled_ops_p = true;
6313
6314 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6315 move_stmt_op, wi);
6316
6317 p->remap_decls_p = save_remap_decls_p;
6318 }
6319 break;
6320 }
6321
6322 return NULL_TREE;
6323 }
6324
6325 /* Move basic block BB from function CFUN to function DEST_FN. The
6326 block is moved out of the original linked list and placed after
6327 block AFTER in the new list. Also, the block is removed from the
6328 original array of blocks and placed in DEST_FN's array of blocks.
6329 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
6330 updated to reflect the moved edges.
6331
6332 The local variables are remapped to new instances, VARS_MAP is used
6333 to record the mapping. */
6334
6335 static void
6336 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6337 basic_block after, bool update_edge_count_p,
6338 struct move_stmt_d *d)
6339 {
6340 struct control_flow_graph *cfg;
6341 edge_iterator ei;
6342 edge e;
6343 gimple_stmt_iterator si;
6344 unsigned old_len, new_len;
6345
6346 /* Remove BB from dominance structures. */
6347 delete_from_dominance_info (CDI_DOMINATORS, bb);
6348
6349 /* Move BB from its current loop to the copy in the new function. */
6350 if (current_loops)
6351 {
6352 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6353 if (new_loop)
6354 bb->loop_father = new_loop;
6355 }
6356
6357 /* Link BB to the new linked list. */
6358 move_block_after (bb, after);
6359
6360 /* Update the edge count in the corresponding flowgraphs. */
6361 if (update_edge_count_p)
6362 FOR_EACH_EDGE (e, ei, bb->succs)
6363 {
6364 cfun->cfg->x_n_edges--;
6365 dest_cfun->cfg->x_n_edges++;
6366 }
6367
6368 /* Remove BB from the original basic block array. */
6369 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6370 cfun->cfg->x_n_basic_blocks--;
6371
6372 /* Grow DEST_CFUN's basic block array if needed. */
6373 cfg = dest_cfun->cfg;
6374 cfg->x_n_basic_blocks++;
6375 if (bb->index >= cfg->x_last_basic_block)
6376 cfg->x_last_basic_block = bb->index + 1;
6377
6378 old_len = vec_safe_length (cfg->x_basic_block_info);
6379 if ((unsigned) cfg->x_last_basic_block >= old_len)
6380 {
6381 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6382 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6383 }
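 /* E.g. with x_last_basic_block == 20 the map above grows to
    20 + (20 + 3) / 4 == 25 slots, i.e. roughly 25% headroom before
    the next reallocation.  */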
6384
6385 (*cfg->x_basic_block_info)[bb->index] = bb;
6386
6387 /* Remap the variables in phi nodes. */
6388 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6389 {
6390 gimple phi = gsi_stmt (si);
6391 use_operand_p use;
6392 tree op = PHI_RESULT (phi);
6393 ssa_op_iter oi;
6394 unsigned i;
6395
6396 if (virtual_operand_p (op))
6397 {
6398 /* Remove the phi nodes for virtual operands (alias analysis will be
6399 run for the new function, anyway). */
6400 remove_phi_node (&si, true);
6401 continue;
6402 }
6403
6404 SET_PHI_RESULT (phi,
6405 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6406 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6407 {
6408 op = USE_FROM_PTR (use);
6409 if (TREE_CODE (op) == SSA_NAME)
6410 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6411 }
6412
6413 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6414 {
6415 location_t locus = gimple_phi_arg_location (phi, i);
6416 tree block = LOCATION_BLOCK (locus);
6417
6418 if (locus == UNKNOWN_LOCATION)
6419 continue;
6420 if (d->orig_block == NULL_TREE || block == d->orig_block)
6421 {
6422 if (d->new_block == NULL_TREE)
6423 locus = LOCATION_LOCUS (locus);
6424 else
6425 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6426 gimple_phi_arg_set_location (phi, i, locus);
6427 }
6428 }
6429
6430 gsi_next (&si);
6431 }
6432
6433 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6434 {
6435 gimple stmt = gsi_stmt (si);
6436 struct walk_stmt_info wi;
6437
6438 memset (&wi, 0, sizeof (wi));
6439 wi.info = d;
6440 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6441
6442 if (gimple_code (stmt) == GIMPLE_LABEL)
6443 {
6444 tree label = gimple_label_label (stmt);
6445 int uid = LABEL_DECL_UID (label);
6446
6447 gcc_assert (uid > -1);
6448
6449 old_len = vec_safe_length (cfg->x_label_to_block_map);
6450 if (old_len <= (unsigned) uid)
6451 {
6452 new_len = 3 * uid / 2 + 1;
6453 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6454 }
6455
6456 (*cfg->x_label_to_block_map)[uid] = bb;
6457 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6458
6459 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6460
6461 if (uid >= dest_cfun->cfg->last_label_uid)
6462 dest_cfun->cfg->last_label_uid = uid + 1;
6463 }
6464
6465 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6466 remove_stmt_from_eh_lp_fn (cfun, stmt);
6467
6468 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6469 gimple_remove_stmt_histograms (cfun, stmt);
6470
6471 /* We cannot leave any operands allocated from the operand caches of
6472 the current function. */
6473 free_stmt_operands (stmt);
6474 push_cfun (dest_cfun);
6475 update_stmt (stmt);
6476 pop_cfun ();
6477 }
6478
6479 FOR_EACH_EDGE (e, ei, bb->succs)
6480 if (e->goto_locus != UNKNOWN_LOCATION)
6481 {
6482 tree block = LOCATION_BLOCK (e->goto_locus);
6483 if (d->orig_block == NULL_TREE
6484 || block == d->orig_block)
6485 e->goto_locus = d->new_block ?
6486 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6487 LOCATION_LOCUS (e->goto_locus);
6488 }
6489 }
6490
6491 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6492 the outermost EH region. Use REGION as the incoming base EH region. */
6493
6494 static eh_region
6495 find_outermost_region_in_block (struct function *src_cfun,
6496 basic_block bb, eh_region region)
6497 {
6498 gimple_stmt_iterator si;
6499
6500 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6501 {
6502 gimple stmt = gsi_stmt (si);
6503 eh_region stmt_region;
6504 int lp_nr;
6505
6506 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6507 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6508 if (stmt_region)
6509 {
6510 if (region == NULL)
6511 region = stmt_region;
6512 else if (stmt_region != region)
6513 {
6514 region = eh_region_outermost (src_cfun, stmt_region, region);
6515 gcc_assert (region != NULL);
6516 }
6517 }
6518 }
6519
6520 return region;
6521 }
6522
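/* Callback for duplicate_eh_regions below.  DECL is a LABEL_DECL of the
   source function; create an artificial label for the destination
   function, record the mapping from DECL to it in the hash table DATA,
   and return the new label.  */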
6523 static tree
6524 new_label_mapper (tree decl, void *data)
6525 {
6526 htab_t hash = (htab_t) data;
6527 struct tree_map *m;
6528 void **slot;
6529
6530 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6531
6532 m = XNEW (struct tree_map);
6533 m->hash = DECL_UID (decl);
6534 m->base.from = decl;
6535 m->to = create_artificial_label (UNKNOWN_LOCATION);
6536 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6537 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6538 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6539
6540 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6541 gcc_assert (*slot == NULL);
6542
6543 *slot = m;
6544
6545 return m->to;
6546 }
6547
6548 /* Change DECL_CONTEXT of all BLOCK_VARS in BLOCK, including
6549 subblocks. */
6550
6551 static void
6552 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6553 tree to_context)
6554 {
6555 tree *tp, t;
6556
6557 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6558 {
6559 t = *tp;
6560 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6561 continue;
6562 replace_by_duplicate_decl (&t, vars_map, to_context);
6563 if (t != *tp)
6564 {
6565 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6566 {
6567 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6568 DECL_HAS_VALUE_EXPR_P (t) = 1;
6569 }
6570 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6571 *tp = t;
6572 }
6573 }
6574
6575 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6576 replace_block_vars_by_duplicates (block, vars_map, to_context);
6577 }
6578
6579 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6580 from FN1 to FN2. */
6581
6582 static void
6583 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6584 struct loop *loop)
6585 {
6586 /* Discard it from the old loop array. */
6587 (*get_loops (fn1))[loop->num] = NULL;
6588
6589 /* Place it in the new loop array, assigning it a new number. */
6590 loop->num = number_of_loops (fn2);
6591 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6592
6593 /* Recurse to children. */
6594 for (loop = loop->inner; loop; loop = loop->next)
6595 fixup_loop_arrays_after_move (fn1, fn2, loop);
6596 }
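
/* For instance (illustrative): if FN2 already owns loops 0..4, the
   moved loop becomes number 5 and is pushed into slot 5 of FN2's loop
   array, so loop->num remains equal to the loop's index in that
   array.  */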
6597
6598 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6599 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6600 single basic block in the original CFG and the new basic block is
6601 returned. DEST_CFUN must not have a CFG yet.
6602
6603 Note that the region need not be a pure SESE region. Blocks inside
6604 the region may contain calls to abort/exit. The only restriction
6605 is that ENTRY_BB should be the only entry point and it must
6606 dominate EXIT_BB.
6607
6608 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6609 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
6610 to the new function.
6611
6612 All local variables referenced in the region are assumed to be in
6613 the corresponding BLOCK_VARS and unexpanded variable lists
6614 associated with DEST_CFUN. */
6615
6616 basic_block
6617 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6618 basic_block exit_bb, tree orig_block)
6619 {
6620 vec<basic_block> bbs, dom_bbs;
6621 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6622 basic_block after, bb, *entry_pred, *exit_succ, abb;
6623 struct function *saved_cfun = cfun;
6624 int *entry_flag, *exit_flag;
6625 unsigned *entry_prob, *exit_prob;
6626 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6627 edge e;
6628 edge_iterator ei;
6629 htab_t new_label_map;
6630 struct pointer_map_t *vars_map, *eh_map;
6631 struct loop *loop = entry_bb->loop_father;
6632 struct loop *loop0 = get_loop (saved_cfun, 0);
6633 struct move_stmt_d d;
6634
6635 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6636 region. */
6637 gcc_assert (entry_bb != exit_bb
6638 && (!exit_bb
6639 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6640
6641 /* Collect all the blocks in the region. Manually add ENTRY_BB
6642 because it won't be added by dfs_enumerate_from. */
6643 bbs.create (0);
6644 bbs.safe_push (entry_bb);
6645 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6646
6647 /* The blocks that used to be dominated by something in BBS will now be
6648 dominated by the new block. */
6649 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6650 bbs.address (),
6651 bbs.length ());
6652
6653 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6654 the predecessor edges to ENTRY_BB and the successor edges to
6655 EXIT_BB so that we can re-attach them to the new basic block that
6656 will replace the region. */
6657 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6658 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6659 entry_flag = XNEWVEC (int, num_entry_edges);
6660 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6661 i = 0;
6662 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6663 {
6664 entry_prob[i] = e->probability;
6665 entry_flag[i] = e->flags;
6666 entry_pred[i++] = e->src;
6667 remove_edge (e);
6668 }
6669
6670 if (exit_bb)
6671 {
6672 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6673 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6674 exit_flag = XNEWVEC (int, num_exit_edges);
6675 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6676 i = 0;
6677 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6678 {
6679 exit_prob[i] = e->probability;
6680 exit_flag[i] = e->flags;
6681 exit_succ[i++] = e->dest;
6682 remove_edge (e);
6683 }
6684 }
6685 else
6686 {
6687 num_exit_edges = 0;
6688 exit_succ = NULL;
6689 exit_flag = NULL;
6690 exit_prob = NULL;
6691 }
6692
6693 /* Switch context to the child function to initialize DEST_FN's CFG. */
6694 gcc_assert (dest_cfun->cfg == NULL);
6695 push_cfun (dest_cfun);
6696
6697 init_empty_tree_cfg ();
6698
6699 /* Initialize EH information for the new function. */
6700 eh_map = NULL;
6701 new_label_map = NULL;
6702 if (saved_cfun->eh)
6703 {
6704 eh_region region = NULL;
6705
6706 FOR_EACH_VEC_ELT (bbs, i, bb)
6707 region = find_outermost_region_in_block (saved_cfun, bb, region);
6708
6709 init_eh_for_function ();
6710 if (region != NULL)
6711 {
6712 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6713 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6714 new_label_mapper, new_label_map);
6715 }
6716 }
6717
6718 /* Initialize an empty loop tree. */
6719 struct loops *loops = ggc_alloc_cleared_loops ();
6720 init_loops_structure (dest_cfun, loops, 1);
6721 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6722 set_loops_for_fn (dest_cfun, loops);
6723
6724 /* Move the outlined loop tree part. */
6725 num_nodes = bbs.length ();
6726 FOR_EACH_VEC_ELT (bbs, i, bb)
6727 {
6728 if (bb->loop_father->header == bb)
6729 {
6730 struct loop *this_loop = bb->loop_father;
6731 struct loop *outer = loop_outer (this_loop);
6732 if (outer == loop
6733 /* If the SESE region contains some bbs ending with
6734 a noreturn call, those are considered to belong
6735 to the outermost loop in saved_cfun, rather than
6736 the entry_bb's loop_father. */
6737 || outer == loop0)
6738 {
6739 if (outer != loop)
6740 num_nodes -= this_loop->num_nodes;
6741 flow_loop_tree_node_remove (bb->loop_father);
6742 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6743 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6744 }
6745 }
6746 else if (bb->loop_father == loop0 && loop0 != loop)
6747 num_nodes--;
6748
6749 /* Remove loop exits from the outlined region. */
6750 if (loops_for_fn (saved_cfun)->exits)
6751 FOR_EACH_EDGE (e, ei, bb->succs)
6752 {
6753 void **slot = htab_find_slot_with_hash
6754 (loops_for_fn (saved_cfun)->exits, e,
6755 htab_hash_pointer (e), NO_INSERT);
6756 if (slot)
6757 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6758 }
6759 }
6760
6761
6762 /* Adjust the number of blocks in the tree root of the outlined part. */
6763 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6764
6765 /* Setup a mapping to be used by move_block_to_fn. */
6766 loop->aux = current_loops->tree_root;
6767 loop0->aux = current_loops->tree_root;
6768
6769 pop_cfun ();
6770
6771 /* Move blocks from BBS into DEST_CFUN. */
6772 gcc_assert (bbs.length () >= 2);
6773 after = dest_cfun->cfg->x_entry_block_ptr;
6774 vars_map = pointer_map_create ();
6775
6776 memset (&d, 0, sizeof (d));
6777 d.orig_block = orig_block;
6778 d.new_block = DECL_INITIAL (dest_cfun->decl);
6779 d.from_context = cfun->decl;
6780 d.to_context = dest_cfun->decl;
6781 d.vars_map = vars_map;
6782 d.new_label_map = new_label_map;
6783 d.eh_map = eh_map;
6784 d.remap_decls_p = true;
6785
6786 FOR_EACH_VEC_ELT (bbs, i, bb)
6787 {
6788 /* No need to update edge counts on the last block. It has
6789 already been updated earlier when we detached the region from
6790 the original CFG. */
6791 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6792 after = bb;
6793 }
6794
6795 loop->aux = NULL;
6796 loop0->aux = NULL;
6797 /* Loop sizes are no longer correct, fix them up. */
6798 loop->num_nodes -= num_nodes;
6799 for (struct loop *outer = loop_outer (loop);
6800 outer; outer = loop_outer (outer))
6801 outer->num_nodes -= num_nodes;
6802 loop0->num_nodes -= bbs.length () - num_nodes;
6803
6804 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6805 {
6806 struct loop *aloop;
6807 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6808 if (aloop != NULL)
6809 {
6810 if (aloop->simduid)
6811 {
6812 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6813 d.to_context);
6814 dest_cfun->has_simduid_loops = true;
6815 }
6816 if (aloop->force_vect)
6817 dest_cfun->has_force_vect_loops = true;
6818 }
6819 }
6820
6821 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6822 if (orig_block)
6823 {
6824 tree block;
6825 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6826 == NULL_TREE);
6827 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6828 = BLOCK_SUBBLOCKS (orig_block);
6829 for (block = BLOCK_SUBBLOCKS (orig_block);
6830 block; block = BLOCK_CHAIN (block))
6831 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6832 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6833 }
6834
6835 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6836 vars_map, dest_cfun->decl);
6837
6838 if (new_label_map)
6839 htab_delete (new_label_map);
6840 if (eh_map)
6841 pointer_map_destroy (eh_map);
6842 pointer_map_destroy (vars_map);
6843
6844 /* Rewire the entry and exit blocks. The successor to the entry
6845 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6846 the child function. Similarly, the predecessor of DEST_FN's
6847 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6848 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6849 various CFG manipulation functions get to the right CFG.
6850
6851 FIXME, this is silly. The CFG ought to become a parameter to
6852 these helpers. */
6853 push_cfun (dest_cfun);
6854 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6855 if (exit_bb)
6856 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6857 pop_cfun ();
6858
6859 /* Back in the original function, the SESE region has disappeared,
6860 create a new basic block in its place. */
6861 bb = create_empty_bb (entry_pred[0]);
6862 if (current_loops)
6863 add_bb_to_loop (bb, loop);
6864 for (i = 0; i < num_entry_edges; i++)
6865 {
6866 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6867 e->probability = entry_prob[i];
6868 }
6869
6870 for (i = 0; i < num_exit_edges; i++)
6871 {
6872 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6873 e->probability = exit_prob[i];
6874 }
6875
6876 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6877 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6878 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6879 dom_bbs.release ();
6880
6881 if (exit_bb)
6882 {
6883 free (exit_prob);
6884 free (exit_flag);
6885 free (exit_succ);
6886 }
6887 free (entry_prob);
6888 free (entry_flag);
6889 free (entry_pred);
6890 bbs.release ();
6891
6892 return bb;
6893 }
6894
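/* A hedged usage sketch (details illustrative; see the OMP expansion in
   omp-low.c for the real caller): outlining a region into CHILD_FN
   might look like

     struct function *child_cfun = DECL_STRUCT_FUNCTION (child_fn);
     basic_block new_bb
       = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);

   after which the blocks from ENTRY_BB to EXIT_BB belong to CHILD_CFUN
   and NEW_BB stands in for them in the original CFG.  */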
6895
6896 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
6897 dumpfile.h). */
6898
6899 void
6900 dump_function_to_file (tree fndecl, FILE *file, int flags)
6901 {
6902 tree arg, var, old_current_fndecl = current_function_decl;
6903 struct function *dsf;
6904 bool ignore_topmost_bind = false, any_var = false;
6905 basic_block bb;
6906 tree chain;
6907 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
6908 && decl_is_tm_clone (fndecl));
6909 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
6910
6911 current_function_decl = fndecl;
6912 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
6913
6914 arg = DECL_ARGUMENTS (fndecl);
6915 while (arg)
6916 {
6917 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
6918 fprintf (file, " ");
6919 print_generic_expr (file, arg, dump_flags);
6920 if (flags & TDF_VERBOSE)
6921 print_node (file, "", arg, 4);
6922 if (DECL_CHAIN (arg))
6923 fprintf (file, ", ");
6924 arg = DECL_CHAIN (arg);
6925 }
6926 fprintf (file, ")\n");
6927
6928 if (flags & TDF_VERBOSE)
6929 print_node (file, "", fndecl, 2);
6930
6931 dsf = DECL_STRUCT_FUNCTION (fndecl);
6932 if (dsf && (flags & TDF_EH))
6933 dump_eh_tree (file, dsf);
6934
6935 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
6936 {
6937 dump_node (fndecl, TDF_SLIM | flags, file);
6938 current_function_decl = old_current_fndecl;
6939 return;
6940 }
6941
6942 /* When GIMPLE is lowered, the variables are no longer available in
6943 BIND_EXPRs, so display them separately. */
6944 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
6945 {
6946 unsigned ix;
6947 ignore_topmost_bind = true;
6948
6949 fprintf (file, "{\n");
6950 if (!vec_safe_is_empty (fun->local_decls))
6951 FOR_EACH_LOCAL_DECL (fun, ix, var)
6952 {
6953 print_generic_decl (file, var, flags);
6954 if (flags & TDF_VERBOSE)
6955 print_node (file, "", var, 4);
6956 fprintf (file, "\n");
6957
6958 any_var = true;
6959 }
6960 if (gimple_in_ssa_p (cfun))
6961 for (ix = 1; ix < num_ssa_names; ++ix)
6962 {
6963 tree name = ssa_name (ix);
6964 if (name && !SSA_NAME_VAR (name))
6965 {
6966 fprintf (file, " ");
6967 print_generic_expr (file, TREE_TYPE (name), flags);
6968 fprintf (file, " ");
6969 print_generic_expr (file, name, flags);
6970 fprintf (file, ";\n");
6971
6972 any_var = true;
6973 }
6974 }
6975 }
6976
6977 if (fun && fun->decl == fndecl
6978 && fun->cfg
6979 && basic_block_info_for_function (fun))
6980 {
6981 /* If the CFG has been built, emit a CFG-based dump. */
6982 if (!ignore_topmost_bind)
6983 fprintf (file, "{\n");
6984
6985 if (any_var && n_basic_blocks_for_function (fun))
6986 fprintf (file, "\n");
6987
6988 FOR_EACH_BB_FN (bb, fun)
6989 dump_bb (file, bb, 2, flags | TDF_COMMENT);
6990
6991 fprintf (file, "}\n");
6992 }
6993 else if (DECL_SAVED_TREE (fndecl) == NULL)
6994 {
6995 /* The function is now in GIMPLE form but the CFG has not been
6996 built yet. Emit the single sequence of GIMPLE statements
6997 that make up its body. */
6998 gimple_seq body = gimple_body (fndecl);
6999
7000 if (gimple_seq_first_stmt (body)
7001 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7002 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7003 print_gimple_seq (file, body, 0, flags);
7004 else
7005 {
7006 if (!ignore_topmost_bind)
7007 fprintf (file, "{\n");
7008
7009 if (any_var)
7010 fprintf (file, "\n");
7011
7012 print_gimple_seq (file, body, 2, flags);
7013 fprintf (file, "}\n");
7014 }
7015 }
7016 else
7017 {
7018 int indent;
7019
7020 /* Make a tree based dump. */
7021 chain = DECL_SAVED_TREE (fndecl);
7022 if (chain && TREE_CODE (chain) == BIND_EXPR)
7023 {
7024 if (ignore_topmost_bind)
7025 {
7026 chain = BIND_EXPR_BODY (chain);
7027 indent = 2;
7028 }
7029 else
7030 indent = 0;
7031 }
7032 else
7033 {
7034 if (!ignore_topmost_bind)
7035 fprintf (file, "{\n");
7036 indent = 2;
7037 }
7038
7039 if (any_var)
7040 fprintf (file, "\n");
7041
7042 print_generic_stmt_indented (file, chain, flags, indent);
7043 if (ignore_topmost_bind)
7044 fprintf (file, "}\n");
7045 }
7046
7047 if (flags & TDF_ENUMERATE_LOCALS)
7048 dump_enumerated_decls (file, flags);
7049 fprintf (file, "\n\n");
7050
7051 current_function_decl = old_current_fndecl;
7052 }
7053
7054 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h) */
7055
7056 DEBUG_FUNCTION void
7057 debug_function (tree fn, int flags)
7058 {
7059 dump_function_to_file (fn, stderr, flags);
7060 }
7061
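/* For example, from gdb one might call (illustrative flag choice):

     (gdb) call debug_function (current_function_decl,
                                TDF_VOPS | TDF_LINENO)

   Any combination of TDF_* flags from dumpfile.h can be passed.  */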
7062
7063 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7064
7065 static void
7066 print_pred_bbs (FILE *file, basic_block bb)
7067 {
7068 edge e;
7069 edge_iterator ei;
7070
7071 FOR_EACH_EDGE (e, ei, bb->preds)
7072 fprintf (file, "bb_%d ", e->src->index);
7073 }
7074
7075
7076 /* Print on FILE the indexes for the successors of basic_block BB. */
7077
7078 static void
7079 print_succ_bbs (FILE *file, basic_block bb)
7080 {
7081 edge e;
7082 edge_iterator ei;
7083
7084 FOR_EACH_EDGE (e, ei, bb->succs)
7085 fprintf (file, "bb_%d ", e->dest->index);
7086 }
7087
7088 /* Print to FILE the basic block BB according to the VERBOSITY level. */
7089
7090 void
7091 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7092 {
7093 char *s_indent = (char *) alloca ((size_t) indent + 1);
7094 memset ((void *) s_indent, ' ', (size_t) indent);
7095 s_indent[indent] = '\0';
7096
7097 /* Print basic_block's header. */
7098 if (verbosity >= 2)
7099 {
7100 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7101 print_pred_bbs (file, bb);
7102 fprintf (file, "}, succs = {");
7103 print_succ_bbs (file, bb);
7104 fprintf (file, "})\n");
7105 }
7106
7107 /* Print basic_block's body. */
7108 if (verbosity >= 3)
7109 {
7110 fprintf (file, "%s {\n", s_indent);
7111 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7112 fprintf (file, "%s }\n", s_indent);
7113 }
7114 }
7115
7116 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7117
7118 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
7119 VERBOSITY level this outputs the contents of the loop, or just its
7120 structure. */
7121
7122 static void
7123 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7124 {
7125 char *s_indent;
7126 basic_block bb;
7127
7128 if (loop == NULL)
7129 return;
7130
7131 s_indent = (char *) alloca ((size_t) indent + 1);
7132 memset ((void *) s_indent, ' ', (size_t) indent);
7133 s_indent[indent] = '\0';
7134
7135 /* Print loop's header. */
7136 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7137 if (loop->header)
7138 fprintf (file, "header = %d", loop->header->index);
7139 else
7140 {
7141 fprintf (file, "deleted)\n");
7142 return;
7143 }
7144 if (loop->latch)
7145 fprintf (file, ", latch = %d", loop->latch->index);
7146 else
7147 fprintf (file, ", multiple latches");
7148 fprintf (file, ", niter = ");
7149 print_generic_expr (file, loop->nb_iterations, 0);
7150
7151 if (loop->any_upper_bound)
7152 {
7153 fprintf (file, ", upper_bound = ");
7154 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7155 }
7156
7157 if (loop->any_estimate)
7158 {
7159 fprintf (file, ", estimate = ");
7160 dump_double_int (file, loop->nb_iterations_estimate, true);
7161 }
7162 fprintf (file, ")\n");
7163
7164 /* Print loop's body. */
7165 if (verbosity >= 1)
7166 {
7167 fprintf (file, "%s{\n", s_indent);
7168 FOR_EACH_BB (bb)
7169 if (bb->loop_father == loop)
7170 print_loops_bb (file, bb, indent, verbosity);
7171
7172 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7173 fprintf (file, "%s}\n", s_indent);
7174 }
7175 }
7176
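/* At VERBOSITY 0 only the header line above is printed, roughly
   (illustrative values, with the niter expression elided):

     loop_1 (header = 3, latch = 4, niter = ..., upper_bound = 9, estimate = 9)

   The loop body and inner loops follow at VERBOSITY >= 1.  */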
7177 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7178 spaces. Depending on the VERBOSITY level this outputs the contents of
7179 the loop, or just its structure. */
7180
7181 static void
7182 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7183 int verbosity)
7184 {
7185 if (loop == NULL)
7186 return;
7187
7188 print_loop (file, loop, indent, verbosity);
7189 print_loop_and_siblings (file, loop->next, indent, verbosity);
7190 }
7191
7192 /* Follow a CFG edge from the entry point of the program, and on entry
7193 of a loop, pretty print the loop structure on FILE. */
7194
7195 void
7196 print_loops (FILE *file, int verbosity)
7197 {
7198 basic_block bb;
7199
7200 bb = ENTRY_BLOCK_PTR;
7201 if (bb && bb->loop_father)
7202 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7203 }
7204
7205 /* Dump a loop. */
7206
7207 DEBUG_FUNCTION void
7208 debug (struct loop &ref)
7209 {
7210 print_loop (stderr, &ref, 0, /*verbosity*/0);
7211 }
7212
7213 DEBUG_FUNCTION void
7214 debug (struct loop *ptr)
7215 {
7216 if (ptr)
7217 debug (*ptr);
7218 else
7219 fprintf (stderr, "<nil>\n");
7220 }
7221
7222 /* Dump a loop verbosely. */
7223
7224 DEBUG_FUNCTION void
7225 debug_verbose (struct loop &ref)
7226 {
7227 print_loop (stderr, &ref, 0, /*verbosity*/3);
7228 }
7229
7230 DEBUG_FUNCTION void
7231 debug_verbose (struct loop *ptr)
7232 {
7233 if (ptr)
7234 debug_verbose (*ptr);
7235 else
7236 fprintf (stderr, "<nil>\n");
7237 }
7238
7239
7240 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7241
7242 DEBUG_FUNCTION void
7243 debug_loops (int verbosity)
7244 {
7245 print_loops (stderr, verbosity);
7246 }
7247
7248 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7249
7250 DEBUG_FUNCTION void
7251 debug_loop (struct loop *loop, int verbosity)
7252 {
7253 print_loop (stderr, loop, 0, verbosity);
7254 }
7255
7256 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7257 level. */
7258
7259 DEBUG_FUNCTION void
7260 debug_loop_num (unsigned num, int verbosity)
7261 {
7262 debug_loop (get_loop (cfun, num), verbosity);
7263 }
7264
7265 /* Return true if BB ends with a call, possibly followed by some
7266 instructions that must stay with the call. Return false
7267 otherwise. */
7268
7269 static bool
7270 gimple_block_ends_with_call_p (basic_block bb)
7271 {
7272 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7273 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7274 }
7275
7276
7277 /* Return true if BB ends with a conditional branch. Return false
7278 otherwise. */
7279
7280 static bool
7281 gimple_block_ends_with_condjump_p (const_basic_block bb)
7282 {
7283 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7284 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7285 }
7286
7287
7288 /* Return true if we need to add fake edge to exit at statement T.
7289 Helper function for gimple_flow_call_edges_add. */
7290
7291 static bool
7292 need_fake_edge_p (gimple t)
7293 {
7294 tree fndecl = NULL_TREE;
7295 int call_flags = 0;
7296
7297 /* NORETURN and LONGJMP calls already have an edge to exit.
7298 CONST and PURE calls do not need one.
7299 We don't currently check for CONST and PURE here, although
7300 it would be a good idea, because those attributes are
7301 figured out from the RTL in mark_constant_function, and
7302 the counter incrementation code from -fprofile-arcs
7303 leads to different results from -fbranch-probabilities. */
7304 if (is_gimple_call (t))
7305 {
7306 fndecl = gimple_call_fndecl (t);
7307 call_flags = gimple_call_flags (t);
7308 }
7309
7310 if (is_gimple_call (t)
7311 && fndecl
7312 && DECL_BUILT_IN (fndecl)
7313 && (call_flags & ECF_NOTHROW)
7314 && !(call_flags & ECF_RETURNS_TWICE)
7315 /* fork() doesn't really return twice, but the effect of
7316 wrapping it in __gcov_fork() which calls __gcov_flush()
7317 and clears the counters before forking has the same
7318 effect as returning twice. Force a fake edge. */
7319 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7320 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7321 return false;
7322
7323 if (is_gimple_call (t))
7324 {
7325 edge_iterator ei;
7326 edge e;
7327 basic_block bb;
7328
7329 if (!(call_flags & ECF_NORETURN))
7330 return true;
7331
7332 bb = gimple_bb (t);
7333 FOR_EACH_EDGE (e, ei, bb->succs)
7334 if ((e->flags & EDGE_FAKE) == 0)
7335 return true;
7336 }
7337
7338 if (gimple_code (t) == GIMPLE_ASM
7339 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7340 return true;
7341
7342 return false;
7343 }
7344
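/* For example, a block ending in

     __asm__ __volatile__ ("" : : : "memory");

   needs a fake edge to EXIT: a volatile asm has unknown effects, so as
   far as the profiler is concerned entering the block does not imply
   that the instructions after the asm are executed.  */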
7345
7346 /* Add fake edges to the function exit for any non-constant and
7347 non-noreturn calls (or noreturn calls with EH/abnormal edges), and
7348 for volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7349 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7350 that were split.
7351
7352 The goal is to expose cases in which entering a basic block does
7353 not imply that all subsequent instructions must be executed. */
7354
7355 static int
7356 gimple_flow_call_edges_add (sbitmap blocks)
7357 {
7358 int i;
7359 int blocks_split = 0;
7360 int last_bb = last_basic_block;
7361 bool check_last_block = false;
7362
7363 if (n_basic_blocks == NUM_FIXED_BLOCKS)
7364 return 0;
7365
7366 if (! blocks)
7367 check_last_block = true;
7368 else
7369 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
7370
7371 /* In the last basic block, before epilogue generation, there will be
7372 a fallthru edge to EXIT. Special care is required if the last insn
7373 of the last basic block is a call because make_edge folds duplicate
7374 edges, which would result in the fallthru edge also being marked
7375 fake, which would result in the fallthru edge being removed by
7376 remove_fake_edges, which would result in an invalid CFG.
7377
7378 Moreover, we can't elide the outgoing fake edge, since the block
7379 profiler needs to take this into account in order to solve the minimal
7380 spanning tree in the case that the call doesn't return.
7381
7382 Handle this by adding a dummy instruction in a new last basic block. */
7383 if (check_last_block)
7384 {
7385 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
7386 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7387 gimple t = NULL;
7388
7389 if (!gsi_end_p (gsi))
7390 t = gsi_stmt (gsi);
7391
7392 if (t && need_fake_edge_p (t))
7393 {
7394 edge e;
7395
7396 e = find_edge (bb, EXIT_BLOCK_PTR);
7397 if (e)
7398 {
7399 gsi_insert_on_edge (e, gimple_build_nop ());
7400 gsi_commit_edge_inserts ();
7401 }
7402 }
7403 }
7404
7405 /* Now add fake edges to the function exit for any non constant
7406 calls since there is no way that we can determine if they will
7407 return or not... */
7408 for (i = 0; i < last_bb; i++)
7409 {
7410 basic_block bb = BASIC_BLOCK (i);
7411 gimple_stmt_iterator gsi;
7412 gimple stmt, last_stmt;
7413
7414 if (!bb)
7415 continue;
7416
7417 if (blocks && !bitmap_bit_p (blocks, i))
7418 continue;
7419
7420 gsi = gsi_last_nondebug_bb (bb);
7421 if (!gsi_end_p (gsi))
7422 {
7423 last_stmt = gsi_stmt (gsi);
7424 do
7425 {
7426 stmt = gsi_stmt (gsi);
7427 if (need_fake_edge_p (stmt))
7428 {
7429 edge e;
7430
7431 /* The handling above of the final block before the
7432 epilogue should be enough to verify that there is
7433 no edge to the exit block in CFG already.
7434 Calling make_edge in such case would cause us to
7435 mark that edge as fake and remove it later. */
7436 #ifdef ENABLE_CHECKING
7437 if (stmt == last_stmt)
7438 {
7439 e = find_edge (bb, EXIT_BLOCK_PTR);
7440 gcc_assert (e == NULL);
7441 }
7442 #endif
7443
7444 /* Note that the following may create a new basic block
7445 and renumber the existing basic blocks. */
7446 if (stmt != last_stmt)
7447 {
7448 e = split_block (bb, stmt);
7449 if (e)
7450 blocks_split++;
7451 }
7452 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
7453 }
7454 gsi_prev (&gsi);
7455 }
7456 while (!gsi_end_p (gsi));
7457 }
7458 }
7459
7460 if (blocks_split)
7461 verify_flow_info ();
7462
7463 return blocks_split;
7464 }
7465
7466 /* Removes edge E and all the blocks dominated by it, and updates dominance
7467 information. The IL in E->src needs to be updated separately.
7468 If dominance info is not available, only the edge E is removed. */
7469
7470 void
7471 remove_edge_and_dominated_blocks (edge e)
7472 {
7473 vec<basic_block> bbs_to_remove = vNULL;
7474 vec<basic_block> bbs_to_fix_dom = vNULL;
7475 bitmap df, df_idom;
7476 edge f;
7477 edge_iterator ei;
7478 bool none_removed = false;
7479 unsigned i;
7480 basic_block bb, dbb;
7481 bitmap_iterator bi;
7482
7483 if (!dom_info_available_p (CDI_DOMINATORS))
7484 {
7485 remove_edge (e);
7486 return;
7487 }
7488
7489 /* No updating is needed for edges to exit. */
7490 if (e->dest == EXIT_BLOCK_PTR)
7491 {
7492 if (cfgcleanup_altered_bbs)
7493 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7494 remove_edge (e);
7495 return;
7496 }
7497
7498 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7499 that is not dominated by E->dest, then this set is empty. Otherwise,
7500 all the basic blocks dominated by E->dest are removed.
7501
7502 Also, to DF_IDOM we store the immediate dominators of the blocks in
7503 the dominance frontier of E (i.e., of the successors of the
7504 removed blocks, if there are any, and of E->dest otherwise). */
7505 FOR_EACH_EDGE (f, ei, e->dest->preds)
7506 {
7507 if (f == e)
7508 continue;
7509
7510 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7511 {
7512 none_removed = true;
7513 break;
7514 }
7515 }
7516
7517 df = BITMAP_ALLOC (NULL);
7518 df_idom = BITMAP_ALLOC (NULL);
7519
7520 if (none_removed)
7521 bitmap_set_bit (df_idom,
7522 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7523 else
7524 {
7525 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7526 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7527 {
7528 FOR_EACH_EDGE (f, ei, bb->succs)
7529 {
7530 if (f->dest != EXIT_BLOCK_PTR)
7531 bitmap_set_bit (df, f->dest->index);
7532 }
7533 }
7534 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7535 bitmap_clear_bit (df, bb->index);
7536
7537 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7538 {
7539 bb = BASIC_BLOCK (i);
7540 bitmap_set_bit (df_idom,
7541 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7542 }
7543 }
7544
7545 if (cfgcleanup_altered_bbs)
7546 {
7547 /* Record the set of the altered basic blocks. */
7548 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7549 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7550 }
7551
7552 /* Remove E and the cancelled blocks. */
7553 if (none_removed)
7554 remove_edge (e);
7555 else
7556 {
7557 /* Walk backwards so as to get a chance to substitute all
7558 released DEFs into debug stmts. See
7559 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7560 details. */
7561 for (i = bbs_to_remove.length (); i-- > 0; )
7562 delete_basic_block (bbs_to_remove[i]);
7563 }
7564
7565 /* Update the dominance information. The immediate dominator may change only
7566 for blocks whose immediate dominator belongs to DF_IDOM:
7567
7568 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7569 removal. Let Z be an arbitrary block such that idom(Z) = Y and
7570 Z dominates X after the removal. Before removal, there exists a path P
7571 from Y to X that avoids Z. Let F be the last edge on P that is
7572 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7573 dominates W, and because of P, Z does not dominate W), and W belongs to
7574 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7575 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7576 {
7577 bb = BASIC_BLOCK (i);
7578 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7579 dbb;
7580 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7581 bbs_to_fix_dom.safe_push (dbb);
7582 }
7583
7584 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7585
7586 BITMAP_FREE (df);
7587 BITMAP_FREE (df_idom);
7588 bbs_to_remove.release ();
7589 bbs_to_fix_dom.release ();
7590 }
7591
7592 /* Purge dead EH edges from basic block BB. */
7593
7594 bool
7595 gimple_purge_dead_eh_edges (basic_block bb)
7596 {
7597 bool changed = false;
7598 edge e;
7599 edge_iterator ei;
7600 gimple stmt = last_stmt (bb);
7601
7602 if (stmt && stmt_can_throw_internal (stmt))
7603 return false;
7604
7605 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7606 {
7607 if (e->flags & EDGE_EH)
7608 {
7609 remove_edge_and_dominated_blocks (e);
7610 changed = true;
7611 }
7612 else
7613 ei_next (&ei);
7614 }
7615
7616 return changed;
7617 }
7618
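/* Typical use in the optimizers (a sketch; OLD_STMT and STMT name the
   last statement of BB before and after some simplification):

     if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
       gimple_purge_dead_eh_edges (bb);

   Once the statement can no longer throw internally, the EH edges out
   of BB are dead and are removed together with the blocks they
   dominated.  */
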
7619 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
7620
7621 bool
7622 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7623 {
7624 bool changed = false;
7625 unsigned i;
7626 bitmap_iterator bi;
7627
7628 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7629 {
7630 basic_block bb = BASIC_BLOCK (i);
7631
7632 /* Earlier gimple_purge_dead_eh_edges could have removed
7633 this basic block already. */
7634 gcc_assert (bb || changed);
7635 if (bb != NULL)
7636 changed |= gimple_purge_dead_eh_edges (bb);
7637 }
7638
7639 return changed;
7640 }
7641
7642 /* Purge dead abnormal call edges from basic block BB. */
7643
7644 bool
7645 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7646 {
7647 bool changed = false;
7648 edge e;
7649 edge_iterator ei;
7650 gimple stmt = last_stmt (bb);
7651
7652 if (!cfun->has_nonlocal_label
7653 && !cfun->calls_setjmp)
7654 return false;
7655
7656 if (stmt && stmt_can_make_abnormal_goto (stmt))
7657 return false;
7658
7659 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7660 {
7661 if (e->flags & EDGE_ABNORMAL)
7662 {
7663 if (e->flags & EDGE_FALLTHRU)
7664 e->flags &= ~EDGE_ABNORMAL;
7665 else
7666 remove_edge_and_dominated_blocks (e);
7667 changed = true;
7668 }
7669 else
7670 ei_next (&ei);
7671 }
7672
7673 return changed;
7674 }
7675
7676 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
7677
7678 bool
7679 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7680 {
7681 bool changed = false;
7682 unsigned i;
7683 bitmap_iterator bi;
7684
7685 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7686 {
7687 basic_block bb = BASIC_BLOCK (i);
7688
7689 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7690 this basic block already. */
7691 gcc_assert (bb || changed);
7692 if (bb != NULL)
7693 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7694 }
7695
7696 return changed;
7697 }
7698
7699 /* This function is called whenever a new edge is created or
7700 redirected. */
7701
7702 static void
7703 gimple_execute_on_growing_pred (edge e)
7704 {
7705 basic_block bb = e->dest;
7706
7707 if (!gimple_seq_empty_p (phi_nodes (bb)))
7708 reserve_phi_args_for_new_edge (bb);
7709 }
7710
7711 /* This function is called immediately before edge E is removed from
7712 the edge vector E->dest->preds. */
7713
7714 static void
7715 gimple_execute_on_shrinking_pred (edge e)
7716 {
7717 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7718 remove_phi_args (e);
7719 }
7720
7721 /*---------------------------------------------------------------------------
7722 Helper functions for Loop versioning
7723 ---------------------------------------------------------------------------*/
7724
7725 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7726 of 'first'. Both of them are dominated by 'new_head' basic block. When
7727 'new_head' was created by splitting 'second's incoming edge, 'second'
7728 received phi arguments on that edge from split_edge(). Later, an
7729 additional edge 'e' was created to connect 'new_head' and 'first'. This
7730 routine now adds to 'e' the phi args that the 'new_head'-to-'second'
7731 edge received as part of the edge splitting. */
7732
7733 static void
7734 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7735 basic_block new_head, edge e)
7736 {
7737 gimple phi1, phi2;
7738 gimple_stmt_iterator psi1, psi2;
7739 tree def;
7740 edge e2 = find_edge (new_head, second);
7741
7742 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7743 edge, we should always have an edge from NEW_HEAD to SECOND. */
7744 gcc_assert (e2 != NULL);
7745
7746 /* Browse all 'second' basic block phi nodes and add phi args to
7747 edge 'e' for 'first' head. PHI args are always in correct order. */
7748
7749 for (psi2 = gsi_start_phis (second),
7750 psi1 = gsi_start_phis (first);
7751 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7752 gsi_next (&psi2), gsi_next (&psi1))
7753 {
7754 phi1 = gsi_stmt (psi1);
7755 phi2 = gsi_stmt (psi2);
7756 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7757 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
7758 }
7759 }
7760
7761
7762 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
7763 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
7764 the destination of the ELSE part. */
7765
7766 static void
7767 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7768 basic_block second_head ATTRIBUTE_UNUSED,
7769 basic_block cond_bb, void *cond_e)
7770 {
7771 gimple_stmt_iterator gsi;
7772 gimple new_cond_expr;
7773 tree cond_expr = (tree) cond_e;
7774 edge e0;
7775
7776 /* Build new conditional expr */
7777 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7778 NULL_TREE, NULL_TREE);
7779
7780 /* Add new cond in cond_bb. */
7781 gsi = gsi_last_bb (cond_bb);
7782 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7783
7784 /* Adjust edges appropriately to connect new head with first head
7785 as well as second head. */
7786 e0 = single_succ_edge (cond_bb);
7787 e0->flags &= ~EDGE_FALLTHRU;
7788 e0->flags |= EDGE_FALSE_VALUE;
7789 }
7790
7791
7792 /* Do book-keeping of basic block BB for the profile consistency checker.
7793 If AFTER_PASS is 0, do pre-pass accounting; if AFTER_PASS is 1,
7794 do post-pass accounting. Store the counts in RECORD. */
7795 static void
7796 gimple_account_profile_record (basic_block bb, int after_pass,
7797 struct profile_record *record)
7798 {
7799 gimple_stmt_iterator i;
7800 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7801 {
7802 record->size[after_pass]
7803 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7804 if (profile_status == PROFILE_READ)
7805 record->time[after_pass]
7806 += estimate_num_insns (gsi_stmt (i),
7807 &eni_time_weights) * bb->count;
7808 else if (profile_status == PROFILE_GUESSED)
7809 record->time[after_pass]
7810 += estimate_num_insns (gsi_stmt (i),
7811 &eni_time_weights) * bb->frequency;
7812 }
7813 }
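
/* E.g. under PROFILE_READ a block with count 1000 whose statements have
   a combined time estimate of 3 adds 3 * 1000 to
   record->time[after_pass], while record->size[after_pass] accumulates
   the size estimates unscaled.  */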
7814
7815 struct cfg_hooks gimple_cfg_hooks = {
7816 "gimple",
7817 gimple_verify_flow_info,
7818 gimple_dump_bb, /* dump_bb */
7819 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
7820 create_bb, /* create_basic_block */
7821 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
7822 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
7823 gimple_can_remove_branch_p, /* can_remove_branch_p */
7824 remove_bb, /* delete_basic_block */
7825 gimple_split_block, /* split_block */
7826 gimple_move_block_after, /* move_block_after */
7827 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
7828 gimple_merge_blocks, /* merge_blocks */
7829 gimple_predict_edge, /* predict_edge */
7830 gimple_predicted_by_p, /* predicted_by_p */
7831 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
7832 gimple_duplicate_bb, /* duplicate_block */
7833 gimple_split_edge, /* split_edge */
7834 gimple_make_forwarder_block, /* make_forwarder_block */
7835 NULL, /* tidy_fallthru_edge */
7836 NULL, /* force_nonfallthru */
7837 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
7838 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
7839 gimple_flow_call_edges_add, /* flow_call_edges_add */
7840 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
7841 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
7842 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
7843 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
7844 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi */
7845 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
7846 flush_pending_stmts, /* flush_pending_stmts */
7847 gimple_empty_block_p, /* block_empty_p */
7848 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
7849 gimple_account_profile_record,
7850 };
7851
7852
7853 /* Split all critical edges. */
7854
7855 static unsigned int
7856 split_critical_edges (void)
7857 {
7858 basic_block bb;
7859 edge e;
7860 edge_iterator ei;
7861
7862 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
7863 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
7864 mappings around the calls to split_edge. */
7865 start_recording_case_labels ();
7866 FOR_ALL_BB (bb)
7867 {
7868 FOR_EACH_EDGE (e, ei, bb->succs)
7869 {
7870 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
7871 split_edge (e);
7872 /* PRE inserts statements to edges and expects that
7873 since split_critical_edges was done beforehand, committing edge
7874 insertions will not split more edges. In addition to critical
7875 edges we must split edges that have multiple successors and
7876 end by control flow statements, such as RESX.
7877 Go ahead and split them too. This matches the logic in
7878 gimple_find_edge_insert_loc. */
7879 else if ((!single_pred_p (e->dest)
7880 || !gimple_seq_empty_p (phi_nodes (e->dest))
7881 || e->dest == EXIT_BLOCK_PTR)
7882 && e->src != ENTRY_BLOCK_PTR
7883 && !(e->flags & EDGE_ABNORMAL))
7884 {
7885 gimple_stmt_iterator gsi;
7886
7887 gsi = gsi_last_bb (e->src);
7888 if (!gsi_end_p (gsi)
7889 && stmt_ends_bb_p (gsi_stmt (gsi))
7890 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
7891 && !gimple_call_builtin_p (gsi_stmt (gsi),
7892 BUILT_IN_RETURN)))
7893 split_edge (e);
7894 }
7895 }
7896 }
7897 end_recording_case_labels ();
7898 return 0;
7899 }
7900
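/* To recall what "critical" means here (illustrative CFG):

       bb1   bb2
         \   / \
          \ /   \
          bb3   bb4

   The edge bb2->bb3 is critical: its source has two successors and its
   destination has two predecessors, so no statement can be inserted on
   that edge without also affecting the bb1->bb3 or bb2->bb4 paths.
   split_edge interposes a fresh block to give such insertions a home.  */
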
7901 namespace {
7902
7903 const pass_data pass_data_split_crit_edges =
7904 {
7905 GIMPLE_PASS, /* type */
7906 "crited", /* name */
7907 OPTGROUP_NONE, /* optinfo_flags */
7908 false, /* has_gate */
7909 true, /* has_execute */
7910 TV_TREE_SPLIT_EDGES, /* tv_id */
7911 PROP_cfg, /* properties_required */
7912 PROP_no_crit_edges, /* properties_provided */
7913 0, /* properties_destroyed */
7914 0, /* todo_flags_start */
7915 TODO_verify_flow, /* todo_flags_finish */
7916 };
7917
7918 class pass_split_crit_edges : public gimple_opt_pass
7919 {
7920 public:
7921 pass_split_crit_edges (gcc::context *ctxt)
7922 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
7923 {}
7924
7925 /* opt_pass methods: */
7926 unsigned int execute () { return split_critical_edges (); }
7927
7928 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
7929 }; // class pass_split_crit_edges
7930
7931 } // anon namespace
7932
7933 gimple_opt_pass *
7934 make_pass_split_crit_edges (gcc::context *ctxt)
7935 {
7936 return new pass_split_crit_edges (ctxt);
7937 }
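
/* Usage sketch (illustrative, assuming the global gcc::context *g
   declared in context.h): the pass manager instantiates the pass
   through this factory,

     opt_pass *p = make_pass_split_crit_edges (g);

   and invokes p->execute () at the point where passes.def schedules
   "crited".  */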
7938
7939
7940 /* Build a ternary operation and gimplify it. Emit code before GSI.
7941 Return the gimple_val holding the result. */
7942
7943 tree
7944 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7945 tree type, tree a, tree b, tree c)
7946 {
7947 tree ret;
7948 location_t loc = gimple_location (gsi_stmt (*gsi));
7949
7950 ret = fold_build3_loc (loc, code, type, a, b, c);
7951 STRIP_NOPS (ret);
7952
7953 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7954 GSI_SAME_STMT);
7955 }
7956
7957 /* Build a binary operation and gimplify it. Emit code before GSI.
7958 Return the gimple_val holding the result. */
7959
7960 tree
7961 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7962 tree type, tree a, tree b)
7963 {
7964 tree ret;
7965
7966 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7967 STRIP_NOPS (ret);
7968
7969 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7970 GSI_SAME_STMT);
7971 }
7972
7973 /* Build a unary operation and gimplify it. Emit code before GSI.
7974 Return the gimple_val holding the result. */
7975
7976 tree
7977 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7978 tree a)
7979 {
7980 tree ret;
7981
7982 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7983 STRIP_NOPS (ret);
7984
7985 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7986 GSI_SAME_STMT);
7987 }
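
/* Usage sketch (illustrative, not from the original source): the three
   helpers above are typically chained to materialize a compound
   expression as gimple values ahead of the statement at *GSI.  For
   example, to compute (A * B) + C in TYPE:

     tree t1 = gimplify_build2 (&gsi, MULT_EXPR, type, a, b);
     tree sum = gimplify_build2 (&gsi, PLUS_EXPR, type, t1, c);

   Each call folds the expression, strips conversion nops, and forces
   the result into a gimple val inserted before the iterator.  */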
7988
7989
7990 \f
7991 /* Emit return warnings. */
7992
7993 static unsigned int
7994 execute_warn_function_return (void)
7995 {
7996 source_location location;
7997 gimple last;
7998 edge e;
7999 edge_iterator ei;
8000
8001 if (!targetm.warn_func_return (cfun->decl))
8002 return 0;
8003
8004   /* If we have a path to EXIT, then we do return.  (TREE_THIS_VOLATILE on cfun->decl means "noreturn".)  */
8005 if (TREE_THIS_VOLATILE (cfun->decl)
8006 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
8007 {
8008 location = UNKNOWN_LOCATION;
8009 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
8010 {
8011 last = last_stmt (e->src);
8012 if ((gimple_code (last) == GIMPLE_RETURN
8013 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8014 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8015 break;
8016 }
8017 if (location == UNKNOWN_LOCATION)
8018 location = cfun->function_end_locus;
8019 warning_at (location, 0, "%<noreturn%> function does return");
8020 }
8021
8022 /* If we see "return;" in some basic block, then we do reach the end
8023 without returning a value. */
8024 else if (warn_return_type
8025 && !TREE_NO_WARNING (cfun->decl)
8026 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
8027 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
8028 {
8029 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
8030 {
8031 gimple last = last_stmt (e->src);
8032 if (gimple_code (last) == GIMPLE_RETURN
8033 && gimple_return_retval (last) == NULL
8034 && !gimple_no_warning_p (last))
8035 {
8036 location = gimple_location (last);
8037 if (location == UNKNOWN_LOCATION)
8038 location = cfun->function_end_locus;
8039 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8040 TREE_NO_WARNING (cfun->decl) = 1;
8041 break;
8042 }
8043 }
8044 }
8045 return 0;
8046 }
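
/* Worked example (illustrative): for

     int f (int flag) { if (flag) return 1; }

   the path with FLAG == 0 reaches EXIT via an implicit "return;", so
   the second branch above reports "control reaches end of non-void
   function" under -Wreturn-type.  Conversely, a function declared
   noreturn that has any edge into EXIT hits the first warning.  */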
8047
8048
8049 /* Given a basic block B which ends with a conditional and has
8050 precisely two successors, determine which of the edges is taken if
8051 the conditional is true and which is taken if the conditional is
8052 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8053
8054 void
8055 extract_true_false_edges_from_block (basic_block b,
8056 edge *true_edge,
8057 edge *false_edge)
8058 {
8059 edge e = EDGE_SUCC (b, 0);
8060
8061 if (e->flags & EDGE_TRUE_VALUE)
8062 {
8063 *true_edge = e;
8064 *false_edge = EDGE_SUCC (b, 1);
8065 }
8066 else
8067 {
8068 *false_edge = e;
8069 *true_edge = EDGE_SUCC (b, 1);
8070 }
8071 }
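
/* Usage sketch (illustrative): for a block B that ends in a
   GIMPLE_COND,

     edge true_edge, false_edge;
     extract_true_false_edges_from_block (b, &true_edge, &false_edge);

   yields in true_edge->dest the block executed when the condition
   holds, relying on the EDGE_TRUE_VALUE/EDGE_FALSE_VALUE flags set
   when the CFG was built.  */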
8072
8073 namespace {
8074
8075 const pass_data pass_data_warn_function_return =
8076 {
8077 GIMPLE_PASS, /* type */
8078 "*warn_function_return", /* name */
8079 OPTGROUP_NONE, /* optinfo_flags */
8080 false, /* has_gate */
8081 true, /* has_execute */
8082 TV_NONE, /* tv_id */
8083 PROP_cfg, /* properties_required */
8084 0, /* properties_provided */
8085 0, /* properties_destroyed */
8086 0, /* todo_flags_start */
8087 0, /* todo_flags_finish */
8088 };
8089
8090 class pass_warn_function_return : public gimple_opt_pass
8091 {
8092 public:
8093 pass_warn_function_return (gcc::context *ctxt)
8094 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8095 {}
8096
8097 /* opt_pass methods: */
8098 unsigned int execute () { return execute_warn_function_return (); }
8099
8100 }; // class pass_warn_function_return
8101
8102 } // anon namespace
8103
8104 gimple_opt_pass *
8105 make_pass_warn_function_return (gcc::context *ctxt)
8106 {
8107 return new pass_warn_function_return (ctxt);
8108 }
8109
8110 /* Walk a gimplified function and warn about calls whose return value is
8111    ignored although the callee carries __attribute__((warn_unused_result)).
8112    This is done before inlining, so we don't have to worry about that.  */
8113
8114 static void
8115 do_warn_unused_result (gimple_seq seq)
8116 {
8117 tree fdecl, ftype;
8118 gimple_stmt_iterator i;
8119
8120 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8121 {
8122 gimple g = gsi_stmt (i);
8123
8124 switch (gimple_code (g))
8125 {
8126 case GIMPLE_BIND:
8127 do_warn_unused_result (gimple_bind_body (g));
8128 break;
8129 case GIMPLE_TRY:
8130 do_warn_unused_result (gimple_try_eval (g));
8131 do_warn_unused_result (gimple_try_cleanup (g));
8132 break;
8133 case GIMPLE_CATCH:
8134 do_warn_unused_result (gimple_catch_handler (g));
8135 break;
8136 case GIMPLE_EH_FILTER:
8137 do_warn_unused_result (gimple_eh_filter_failure (g));
8138 break;
8139
8140 case GIMPLE_CALL:
8141 if (gimple_call_lhs (g))
8142 break;
8143 if (gimple_call_internal_p (g))
8144 break;
8145
8146 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8147 LHS. All calls whose value is ignored should be
8148 represented like this. Look for the attribute. */
8149 fdecl = gimple_call_fndecl (g);
8150 ftype = gimple_call_fntype (g);
8151
8152 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8153 {
8154 location_t loc = gimple_location (g);
8155
8156 if (fdecl)
8157 warning_at (loc, OPT_Wunused_result,
8158 "ignoring return value of %qD, "
8159 "declared with attribute warn_unused_result",
8160 fdecl);
8161 else
8162 warning_at (loc, OPT_Wunused_result,
8163 "ignoring return value of function "
8164 "declared with attribute warn_unused_result");
8165 }
8166 break;
8167
8168 default:
8169 /* Not a container, not a call, or a call whose value is used. */
8170 break;
8171 }
8172 }
8173 }
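
/* Worked example (illustrative; the declaration is hypothetical):

     __attribute__ ((warn_unused_result)) int try_lock (void);
     void f (void) { try_lock (); }

   The bare call gimplifies to a GIMPLE_CALL with no LHS, so the walk
   above finds the attribute on the call's fntype and emits
   "ignoring return value of 'try_lock', declared with attribute
   warn_unused_result".  */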
8174
8175 static unsigned int
8176 run_warn_unused_result (void)
8177 {
8178 do_warn_unused_result (gimple_body (current_function_decl));
8179 return 0;
8180 }
8181
8182 static bool
8183 gate_warn_unused_result (void)
8184 {
8185 return flag_warn_unused_result;
8186 }
8187
8188 namespace {
8189
8190 const pass_data pass_data_warn_unused_result =
8191 {
8192 GIMPLE_PASS, /* type */
8193 "*warn_unused_result", /* name */
8194 OPTGROUP_NONE, /* optinfo_flags */
8195 true, /* has_gate */
8196 true, /* has_execute */
8197 TV_NONE, /* tv_id */
8198 PROP_gimple_any, /* properties_required */
8199 0, /* properties_provided */
8200 0, /* properties_destroyed */
8201 0, /* todo_flags_start */
8202 0, /* todo_flags_finish */
8203 };
8204
8205 class pass_warn_unused_result : public gimple_opt_pass
8206 {
8207 public:
8208 pass_warn_unused_result (gcc::context *ctxt)
8209 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8210 {}
8211
8212 /* opt_pass methods: */
8213 bool gate () { return gate_warn_unused_result (); }
8214 unsigned int execute () { return run_warn_unused_result (); }
8215
8216 }; // class pass_warn_unused_result
8217
8218 } // anon namespace
8219
8220 gimple_opt_pass *
8221 make_pass_warn_unused_result (gcc::context *ctxt)
8222 {
8223 return new pass_warn_unused_result (ctxt);
8224 }
8225
8226 /* IPA passes, compilation of earlier functions or inlining
8227    might have changed some properties, such as having marked functions
8228    nothrow, pure, const or noreturn.
8229    Remove redundant edges and basic blocks, and create new ones if necessary.
8230 
8231    This pass can't be executed as a standalone pass from the pass manager,
8232    because between inlining and this fixup verify_flow_info would fail.  */
8233
8234 unsigned int
8235 execute_fixup_cfg (void)
8236 {
8237 basic_block bb;
8238 gimple_stmt_iterator gsi;
8239 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8240 gcov_type count_scale;
8241 edge e;
8242 edge_iterator ei;
8243
8244 count_scale
8245 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8246 ENTRY_BLOCK_PTR->count);
8247
8248 ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
8249 EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
8250 count_scale);
8251
8252 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
8253 e->count = apply_scale (e->count, count_scale);
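
  /* Note (illustrative): GCOV_COMPUTE_SCALE above produced a
     fixed-point ratio scaled by REG_BR_PROB_BASE, and
     apply_scale (count, scale) is count * scale / REG_BR_PROB_BASE,
     so every count in the body below ends up rescaled by the ratio
     of the cgraph node's count to the old ENTRY count.  */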
8254
8255 FOR_EACH_BB (bb)
8256 {
8257 bb->count = apply_scale (bb->count, count_scale);
8258 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8259 {
8260 gimple stmt = gsi_stmt (gsi);
8261 tree decl = is_gimple_call (stmt)
8262 ? gimple_call_fndecl (stmt)
8263 : NULL;
8264 if (decl)
8265 {
8266 int flags = gimple_call_flags (stmt);
8267 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8268 {
8269 if (gimple_purge_dead_abnormal_call_edges (bb))
8270 todo |= TODO_cleanup_cfg;
8271
8272 if (gimple_in_ssa_p (cfun))
8273 {
8274 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8275 update_stmt (stmt);
8276 }
8277 }
8278
8279 if (flags & ECF_NORETURN
8280 && fixup_noreturn_call (stmt))
8281 todo |= TODO_cleanup_cfg;
8282 }
8283
8284 if (maybe_clean_eh_stmt (stmt)
8285 && gimple_purge_dead_eh_edges (bb))
8286 todo |= TODO_cleanup_cfg;
8287 }
8288
8289 FOR_EACH_EDGE (e, ei, bb->succs)
8290 e->count = apply_scale (e->count, count_scale);
8291
8292       /* If we have a basic block with no successors that does not
8293 	 end with a control statement or a noreturn call, end it with
8294 	 a call to __builtin_unreachable.  This situation can occur
8295 	 when inlining a noreturn call that does in fact return.  */
8296 if (EDGE_COUNT (bb->succs) == 0)
8297 {
8298 gimple stmt = last_stmt (bb);
8299 if (!stmt
8300 || (!is_ctrl_stmt (stmt)
8301 && (!is_gimple_call (stmt)
8302 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8303 {
8304 stmt = gimple_build_call
8305 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8306 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8307 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8308 }
8309 }
8310 }
8311 if (count_scale != REG_BR_PROB_BASE)
8312 compute_function_frequency ();
8313
8314 /* We just processed all calls. */
8315 if (cfun->gimple_df)
8316 vec_free (MODIFIED_NORETURN_CALLS (cfun));
8317
8318 /* Dump a textual representation of the flowgraph. */
8319 if (dump_file)
8320 gimple_dump_cfg (dump_file, dump_flags);
8321
8322 if (current_loops
8323 && (todo & TODO_cleanup_cfg))
8324 loops_state_set (LOOPS_NEED_FIXUP);
8325
8326 return todo;
8327 }
8328
8329 namespace {
8330
8331 const pass_data pass_data_fixup_cfg =
8332 {
8333 GIMPLE_PASS, /* type */
8334 "*free_cfg_annotations", /* name */
8335 OPTGROUP_NONE, /* optinfo_flags */
8336 false, /* has_gate */
8337 true, /* has_execute */
8338 TV_NONE, /* tv_id */
8339 PROP_cfg, /* properties_required */
8340 0, /* properties_provided */
8341 0, /* properties_destroyed */
8342 0, /* todo_flags_start */
8343 0, /* todo_flags_finish */
8344 };
8345
8346 class pass_fixup_cfg : public gimple_opt_pass
8347 {
8348 public:
8349 pass_fixup_cfg (gcc::context *ctxt)
8350 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8351 {}
8352
8353 /* opt_pass methods: */
8354 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8355 unsigned int execute () { return execute_fixup_cfg (); }
8356
8357 }; // class pass_fixup_cfg
8358
8359 } // anon namespace
8360
8361 gimple_opt_pass *
8362 make_pass_fixup_cfg (gcc::context *ctxt)
8363 {
8364 return new pass_fixup_cfg (ctxt);
8365 }
8366
8367 /* Garbage collection support for edge_def. */
8368
8369 extern void gt_ggc_mx (tree&);
8370 extern void gt_ggc_mx (gimple&);
8371 extern void gt_ggc_mx (rtx&);
8372 extern void gt_ggc_mx (basic_block&);
8373
8374 void
8375 gt_ggc_mx (edge_def *e)
8376 {
8377 tree block = LOCATION_BLOCK (e->goto_locus);
8378 gt_ggc_mx (e->src);
8379 gt_ggc_mx (e->dest);
8380 if (current_ir_type () == IR_GIMPLE)
8381 gt_ggc_mx (e->insns.g);
8382 else
8383 gt_ggc_mx (e->insns.r);
8384 gt_ggc_mx (block);
8385 }
8386
8387 /* PCH support for edge_def. */
8388
8389 extern void gt_pch_nx (tree&);
8390 extern void gt_pch_nx (gimple&);
8391 extern void gt_pch_nx (rtx&);
8392 extern void gt_pch_nx (basic_block&);
8393
8394 void
8395 gt_pch_nx (edge_def *e)
8396 {
8397 tree block = LOCATION_BLOCK (e->goto_locus);
8398 gt_pch_nx (e->src);
8399 gt_pch_nx (e->dest);
8400 if (current_ir_type () == IR_GIMPLE)
8401 gt_pch_nx (e->insns.g);
8402 else
8403 gt_pch_nx (e->insns.r);
8404 gt_pch_nx (block);
8405 }
8406
8407 void
8408 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8409 {
8410 tree block = LOCATION_BLOCK (e->goto_locus);
8411 op (&(e->src), cookie);
8412 op (&(e->dest), cookie);
8413 if (current_ir_type () == IR_GIMPLE)
8414 op (&(e->insns.g), cookie);
8415 else
8416 op (&(e->insns.r), cookie);
8417 op (&(block), cookie);
8418 }
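
/* Note (illustrative): the subtle field in the three walkers above is
   goto_locus.  A location_t can carry a BLOCK in its ad-hoc location
   data, so the walkers extract it with LOCATION_BLOCK and mark it
   explicitly; otherwise that block would be invisible to GC and PCH,
   since edge_def holds no direct tree pointer to it.  */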