target.c: New file.
[gcc.git] / gcc / tree-cfg.c
1 /* Control flow functions for trees.
2 Copyright (C) 2001-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "hash-table.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "flags.h"
30 #include "function.h"
31 #include "ggc.h"
32 #include "gimple-pretty-print.h"
33 #include "tree-ssa.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "diagnostic-core.h"
37 #include "except.h"
38 #include "cfgloop.h"
39 #include "tree-ssa-propagate.h"
40 #include "value-prof.h"
41 #include "pointer-set.h"
42 #include "tree-inline.h"
43 #include "target.h"
44 #include "tree-ssa-live.h"
45
46 /* This file contains functions for building the Control Flow Graph (CFG)
47 for a function tree. */
48
49 /* Local declarations. */
50
51 /* Initial capacity for the basic block array. */
52 static const int initial_cfg_capacity = 20;
53
54 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
55 which use a particular edge. The CASE_LABEL_EXPRs are chained together
56 via their CASE_CHAIN field, which we clear after we're done with the
57 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
58
59 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
60 update the case vector in response to edge redirections.
61
62 Right now this table is set up and torn down at key points in the
63 compilation process. It would be nice if we could make the table
64 more persistent. The key is getting notification of changes to
65 the CFG (particularly edge removal, creation and redirection). */
66
67 static struct pointer_map_t *edge_to_cases;
68
69 /* If we record edge_to_cases, this bitmap will hold indexes
70 of basic blocks that end in a GIMPLE_SWITCH which we touched
71 due to edge manipulations. */
72
73 static bitmap touched_switch_bbs;
74
75 /* CFG statistics. */
76 struct cfg_stats_d
77 {
78 long num_merged_labels;
79 };
80
81 static struct cfg_stats_d cfg_stats;
82
83 /* Nonzero if we found a computed goto while building basic blocks. */
84 static bool found_computed_goto;
85
86 /* Hash table to store last discriminator assigned for each locus. */
87 struct locus_discrim_map
88 {
89 location_t locus;
90 int discriminator;
91 };
92
93 /* Hashtable helpers. */
94
95 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
96 {
97 typedef locus_discrim_map value_type;
98 typedef locus_discrim_map compare_type;
99 static inline hashval_t hash (const value_type *);
100 static inline bool equal (const value_type *, const compare_type *);
101 };
102
103 /* Trivial hash function for a location_t. ITEM is a pointer to
104 a hash table entry that maps a location_t to a discriminator. */
105
106 inline hashval_t
107 locus_discrim_hasher::hash (const value_type *item)
108 {
109 return LOCATION_LINE (item->locus);
110 }
111
112 /* Equality function for the locus-to-discriminator map. A and B
113 point to the two hash table entries to compare. */
114
115 inline bool
116 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
117 {
118 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
119 }
120
121 static hash_table <locus_discrim_hasher> discriminator_per_locus;
122
123 /* Basic blocks and flowgraphs. */
124 static void make_blocks (gimple_seq);
125 static void factor_computed_gotos (void);
126
127 /* Edges. */
128 static void make_edges (void);
129 static void assign_discriminators (void);
130 static void make_cond_expr_edges (basic_block);
131 static void make_gimple_switch_edges (basic_block);
132 static void make_goto_expr_edges (basic_block);
133 static void make_gimple_asm_edges (basic_block);
134 static edge gimple_redirect_edge_and_branch (edge, basic_block);
135 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
136 static unsigned int split_critical_edges (void);
137
138 /* Various helpers. */
139 static inline bool stmt_starts_bb_p (gimple, gimple);
140 static int gimple_verify_flow_info (void);
141 static void gimple_make_forwarder_block (edge);
142 static gimple first_non_label_stmt (basic_block);
143 static bool verify_gimple_transaction (gimple);
144
145 /* Flowgraph optimization and cleanup. */
146 static void gimple_merge_blocks (basic_block, basic_block);
147 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
148 static void remove_bb (basic_block);
149 static edge find_taken_edge_computed_goto (basic_block, tree);
150 static edge find_taken_edge_cond_expr (basic_block, tree);
151 static edge find_taken_edge_switch_expr (basic_block, tree);
152 static tree find_case_label_for_value (gimple, tree);
153
154 void
155 init_empty_tree_cfg_for_function (struct function *fn)
156 {
157 /* Initialize the basic block array. */
158 init_flow (fn);
159 profile_status_for_function (fn) = PROFILE_ABSENT;
160 n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
161 last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
162 vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
163 vec_safe_grow_cleared (basic_block_info_for_function (fn),
164 initial_cfg_capacity);
165
166 /* Build a mapping of labels to their associated blocks. */
167 vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
168 vec_safe_grow_cleared (label_to_block_map_for_function (fn),
169 initial_cfg_capacity);
170
171 SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
172 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
173 SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
174 EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
175
176 ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
177 = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
178 EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
179 = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
180 }
181
182 void
183 init_empty_tree_cfg (void)
184 {
185 init_empty_tree_cfg_for_function (cfun);
186 }
187
188 /*---------------------------------------------------------------------------
189 Create basic blocks
190 ---------------------------------------------------------------------------*/
191
192 /* Entry point to the CFG builder for trees. SEQ is the sequence of
193 statements to be added to the flowgraph. */
194
195 static void
196 build_gimple_cfg (gimple_seq seq)
197 {
198 /* Register specific gimple functions. */
199 gimple_register_cfg_hooks ();
200
201 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
202
203 init_empty_tree_cfg ();
204
205 found_computed_goto = 0;
206 make_blocks (seq);
207
208 /* Computed gotos are hell to deal with, especially if there are
209 lots of them with a large number of destinations. So we factor
210 them to a common computed goto location before we build the
211 edge list. After we convert back to normal form, we will un-factor
212 the computed gotos since factoring introduces an unwanted jump. */
213 if (found_computed_goto)
214 factor_computed_gotos ();
215
216 /* Make sure there is always at least one block, even if it's empty. */
217 if (n_basic_blocks == NUM_FIXED_BLOCKS)
218 create_empty_bb (ENTRY_BLOCK_PTR);
219
220 /* Adjust the size of the array. */
221 if (basic_block_info->length () < (size_t) n_basic_blocks)
222 vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
223
224 /* To speed up statement iterator walks, we first purge dead labels. */
225 cleanup_dead_labels ();
226
227 /* Group case nodes to reduce the number of edges.
228 We do this after cleaning up dead labels because otherwise we miss
229 a lot of obvious case merging opportunities. */
230 group_case_labels ();
231
232 /* Create the edges of the flowgraph. */
233 discriminator_per_locus.create (13);
234 make_edges ();
235 assign_discriminators ();
236 cleanup_dead_labels ();
237 discriminator_per_locus.dispose ();
238 }
239
240 static unsigned int
241 execute_build_cfg (void)
242 {
243 gimple_seq body = gimple_body (current_function_decl);
244
245 build_gimple_cfg (body);
246 gimple_set_body (current_function_decl, NULL);
247 if (dump_file && (dump_flags & TDF_DETAILS))
248 {
249 fprintf (dump_file, "Scope blocks:\n");
250 dump_scope_blocks (dump_file, dump_flags);
251 }
252 cleanup_tree_cfg ();
253 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
254 return 0;
255 }
256
257 namespace {
258
259 const pass_data pass_data_build_cfg =
260 {
261 GIMPLE_PASS, /* type */
262 "cfg", /* name */
263 OPTGROUP_NONE, /* optinfo_flags */
264 false, /* has_gate */
265 true, /* has_execute */
266 TV_TREE_CFG, /* tv_id */
267 PROP_gimple_leh, /* properties_required */
268 ( PROP_cfg | PROP_loops ), /* properties_provided */
269 0, /* properties_destroyed */
270 0, /* todo_flags_start */
271 TODO_verify_stmts, /* todo_flags_finish */
272 };
273
274 class pass_build_cfg : public gimple_opt_pass
275 {
276 public:
277 pass_build_cfg (gcc::context *ctxt)
278 : gimple_opt_pass (pass_data_build_cfg, ctxt)
279 {}
280
281 /* opt_pass methods: */
282 unsigned int execute () { return execute_build_cfg (); }
283
284 }; // class pass_build_cfg
285
286 } // anon namespace
287
288 gimple_opt_pass *
289 make_pass_build_cfg (gcc::context *ctxt)
290 {
291 return new pass_build_cfg (ctxt);
292 }
293
294
295 /* Return true if T is a computed goto. */
296
297 static bool
298 computed_goto_p (gimple t)
299 {
300 return (gimple_code (t) == GIMPLE_GOTO
301 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
302 }
303
304
305 /* Search the CFG for any computed gotos. If found, factor them to a
306 common computed goto site. Also record the location of that site so
307 that we can un-factor the gotos after we have converted back to
308 normal form. */
309
310 static void
311 factor_computed_gotos (void)
312 {
313 basic_block bb;
314 tree factored_label_decl = NULL;
315 tree var = NULL;
316 gimple factored_computed_goto_label = NULL;
317 gimple factored_computed_goto = NULL;
318
319 /* We know there are one or more computed gotos in this function.
320 Examine the last statement in each basic block to see if the block
321 ends with a computed goto. */
322
323 FOR_EACH_BB (bb)
324 {
325 gimple_stmt_iterator gsi = gsi_last_bb (bb);
326 gimple last;
327
328 if (gsi_end_p (gsi))
329 continue;
330
331 last = gsi_stmt (gsi);
332
333 /* Ignore the computed goto we create when we factor the original
334 computed gotos. */
335 if (last == factored_computed_goto)
336 continue;
337
338 /* If the last statement is a computed goto, factor it. */
339 if (computed_goto_p (last))
340 {
341 gimple assignment;
342
343 /* The first time we find a computed goto we need to create
344 the factored goto block and the variable each original
345 computed goto will use for their goto destination. */
346 if (!factored_computed_goto)
347 {
348 basic_block new_bb = create_empty_bb (bb);
349 gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
350
351 /* Create the destination of the factored goto. Each original
352 computed goto will put its desired destination into this
353 variable and jump to the label we create immediately
354 below. */
355 var = create_tmp_var (ptr_type_node, "gotovar");
356
357 /* Build a label for the new block which will contain the
358 factored computed goto. */
359 factored_label_decl = create_artificial_label (UNKNOWN_LOCATION);
360 factored_computed_goto_label
361 = gimple_build_label (factored_label_decl);
362 gsi_insert_after (&new_gsi, factored_computed_goto_label,
363 GSI_NEW_STMT);
364
365 /* Build our new computed goto. */
366 factored_computed_goto = gimple_build_goto (var);
367 gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
368 }
369
370 /* Copy the original computed goto's destination into VAR. */
371 assignment = gimple_build_assign (var, gimple_goto_dest (last));
372 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
373
374 /* And re-vector the computed goto to the new destination. */
375 gimple_goto_set_dest (last, factored_label_decl);
376 }
377 }
378 }
379
380
381 /* Build a flowgraph for the sequence of stmts SEQ. */
382
383 static void
384 make_blocks (gimple_seq seq)
385 {
386 gimple_stmt_iterator i = gsi_start (seq);
387 gimple stmt = NULL;
388 bool start_new_block = true;
389 bool first_stmt_of_seq = true;
390 basic_block bb = ENTRY_BLOCK_PTR;
391
392 while (!gsi_end_p (i))
393 {
394 gimple prev_stmt;
395
396 prev_stmt = stmt;
397 stmt = gsi_stmt (i);
398
399 /* If the statement starts a new basic block or if we have determined
400 in a previous pass that we need to create a new block for STMT, do
401 so now. */
402 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
403 {
404 if (!first_stmt_of_seq)
405 gsi_split_seq_before (&i, &seq);
406 bb = create_basic_block (seq, NULL, bb);
407 start_new_block = false;
408 }
409
410 /* Now add STMT to BB and create the subgraphs for special statement
411 codes. */
412 gimple_set_bb (stmt, bb);
413
414 if (computed_goto_p (stmt))
415 found_computed_goto = true;
416
417 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
418 next iteration. */
419 if (stmt_ends_bb_p (stmt))
420 {
421 /* If the stmt can make abnormal goto use a new temporary
422 for the assignment to the LHS. This makes sure the old value
423 of the LHS is available on the abnormal edge. Otherwise
424 we will end up with overlapping life-ranges for abnormal
425 SSA names. */
426 if (gimple_has_lhs (stmt)
427 && stmt_can_make_abnormal_goto (stmt)
428 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
429 {
430 tree lhs = gimple_get_lhs (stmt);
431 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
432 gimple s = gimple_build_assign (lhs, tmp);
433 gimple_set_location (s, gimple_location (stmt));
434 gimple_set_block (s, gimple_block (stmt));
435 gimple_set_lhs (stmt, tmp);
436 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
437 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
438 DECL_GIMPLE_REG_P (tmp) = 1;
439 gsi_insert_after (&i, s, GSI_SAME_STMT);
440 }
441 start_new_block = true;
442 }
443
444 gsi_next (&i);
445 first_stmt_of_seq = false;
446 }
447 }
448
449
450 /* Create and return a new empty basic block after bb AFTER. */
451
452 static basic_block
453 create_bb (void *h, void *e, basic_block after)
454 {
455 basic_block bb;
456
457 gcc_assert (!e);
458
459 /* Create and initialize a new basic block. Since alloc_block uses
460 GC allocation that clears memory to allocate a basic block, we do
461 not have to clear the newly allocated basic block here. */
462 bb = alloc_block ();
463
464 bb->index = last_basic_block;
465 bb->flags = BB_NEW;
466 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
467
468 /* Add the new block to the linked list of blocks. */
469 link_block (bb, after);
470
471 /* Grow the basic block array if needed. */
472 if ((size_t) last_basic_block == basic_block_info->length ())
473 {
474 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
475 vec_safe_grow_cleared (basic_block_info, new_size);
476 }
477
478 /* Add the newly created block to the array. */
479 SET_BASIC_BLOCK (last_basic_block, bb);
480
481 n_basic_blocks++;
482 last_basic_block++;
483
484 return bb;
485 }
486
487
488 /*---------------------------------------------------------------------------
489 Edge creation
490 ---------------------------------------------------------------------------*/
491
492 /* Fold COND_EXPR_COND of each COND_EXPR. */
493
494 void
495 fold_cond_expr_cond (void)
496 {
497 basic_block bb;
498
499 FOR_EACH_BB (bb)
500 {
501 gimple stmt = last_stmt (bb);
502
503 if (stmt && gimple_code (stmt) == GIMPLE_COND)
504 {
505 location_t loc = gimple_location (stmt);
506 tree cond;
507 bool zerop, onep;
508
509 fold_defer_overflow_warnings ();
510 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
511 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
512 if (cond)
513 {
514 zerop = integer_zerop (cond);
515 onep = integer_onep (cond);
516 }
517 else
518 zerop = onep = false;
519
520 fold_undefer_overflow_warnings (zerop || onep,
521 stmt,
522 WARN_STRICT_OVERFLOW_CONDITIONAL);
523 if (zerop)
524 gimple_cond_make_false (stmt);
525 else if (onep)
526 gimple_cond_make_true (stmt);
527 }
528 }
529 }
530
531 /* Join all the blocks in the flowgraph. */
532
533 static void
534 make_edges (void)
535 {
536 basic_block bb;
537 struct omp_region *cur_region = NULL;
538
539 /* Create an edge from entry to the first block with executable
540 statements in it. */
541 make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
542
543 /* Traverse the basic block array placing edges. */
544 FOR_EACH_BB (bb)
545 {
546 gimple last = last_stmt (bb);
547 bool fallthru;
548
549 if (last)
550 {
551 enum gimple_code code = gimple_code (last);
552 switch (code)
553 {
554 case GIMPLE_GOTO:
555 make_goto_expr_edges (bb);
556 fallthru = false;
557 break;
558 case GIMPLE_RETURN:
559 make_edge (bb, EXIT_BLOCK_PTR, 0);
560 fallthru = false;
561 break;
562 case GIMPLE_COND:
563 make_cond_expr_edges (bb);
564 fallthru = false;
565 break;
566 case GIMPLE_SWITCH:
567 make_gimple_switch_edges (bb);
568 fallthru = false;
569 break;
570 case GIMPLE_RESX:
571 make_eh_edges (last);
572 fallthru = false;
573 break;
574 case GIMPLE_EH_DISPATCH:
575 fallthru = make_eh_dispatch_edges (last);
576 break;
577
578 case GIMPLE_CALL:
579 /* If this function receives a nonlocal goto, then we need to
580 make edges from this call site to all the nonlocal goto
581 handlers. */
582 if (stmt_can_make_abnormal_goto (last))
583 make_abnormal_goto_edges (bb, true);
584
585 /* If this statement has reachable exception handlers, then
586 create abnormal edges to them. */
587 make_eh_edges (last);
588
589 /* BUILTIN_RETURN is really a return statement. */
590 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
591 make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
592 /* Some calls are known not to return. */
593 else
594 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
595 break;
596
597 case GIMPLE_ASSIGN:
598 /* A GIMPLE_ASSIGN may throw internally and thus be considered
599 control-altering. */
600 if (is_ctrl_altering_stmt (last))
601 make_eh_edges (last);
602 fallthru = true;
603 break;
604
605 case GIMPLE_ASM:
606 make_gimple_asm_edges (bb);
607 fallthru = true;
608 break;
609
610 case GIMPLE_OMP_PARALLEL:
611 case GIMPLE_OMP_TASK:
612 case GIMPLE_OMP_FOR:
613 case GIMPLE_OMP_SINGLE:
614 case GIMPLE_OMP_TEAMS:
615 case GIMPLE_OMP_MASTER:
616 case GIMPLE_OMP_TASKGROUP:
617 case GIMPLE_OMP_ORDERED:
618 case GIMPLE_OMP_CRITICAL:
619 case GIMPLE_OMP_SECTION:
620 cur_region = new_omp_region (bb, code, cur_region);
621 fallthru = true;
622 break;
623
624 case GIMPLE_OMP_TARGET:
625 cur_region = new_omp_region (bb, code, cur_region);
626 fallthru = true;
627 if (gimple_omp_target_kind (last) == GF_OMP_TARGET_KIND_UPDATE)
628 cur_region = cur_region->outer;
629 break;
630
631 case GIMPLE_OMP_SECTIONS:
632 cur_region = new_omp_region (bb, code, cur_region);
633 fallthru = true;
634 break;
635
636 case GIMPLE_OMP_SECTIONS_SWITCH:
637 fallthru = false;
638 break;
639
640 case GIMPLE_OMP_ATOMIC_LOAD:
641 case GIMPLE_OMP_ATOMIC_STORE:
642 fallthru = true;
643 break;
644
645 case GIMPLE_OMP_RETURN:
646 /* In the case of a GIMPLE_OMP_SECTION, the edge will go
647 somewhere other than the next block. This will be
648 created later. */
649 cur_region->exit = bb;
650 fallthru = cur_region->type != GIMPLE_OMP_SECTION;
651 cur_region = cur_region->outer;
652 break;
653
654 case GIMPLE_OMP_CONTINUE:
655 cur_region->cont = bb;
656 switch (cur_region->type)
657 {
658 case GIMPLE_OMP_FOR:
659 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
660 succs edges as abnormal to prevent splitting
661 them. */
662 single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
663 /* Make the loopback edge. */
664 make_edge (bb, single_succ (cur_region->entry),
665 EDGE_ABNORMAL);
666
667 /* Create an edge from GIMPLE_OMP_FOR to exit, which
668 corresponds to the case that the body of the loop
669 is not executed at all. */
670 make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
671 make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
672 fallthru = false;
673 break;
674
675 case GIMPLE_OMP_SECTIONS:
676 /* Wire up the edges into and out of the nested sections. */
677 {
678 basic_block switch_bb = single_succ (cur_region->entry);
679
680 struct omp_region *i;
681 for (i = cur_region->inner; i ; i = i->next)
682 {
683 gcc_assert (i->type == GIMPLE_OMP_SECTION);
684 make_edge (switch_bb, i->entry, 0);
685 make_edge (i->exit, bb, EDGE_FALLTHRU);
686 }
687
688 /* Make the loopback edge to the block with
689 GIMPLE_OMP_SECTIONS_SWITCH. */
690 make_edge (bb, switch_bb, 0);
691
692 /* Make the edge from the switch to exit. */
693 make_edge (switch_bb, bb->next_bb, 0);
694 fallthru = false;
695 }
696 break;
697
698 default:
699 gcc_unreachable ();
700 }
701 break;
702
703 case GIMPLE_TRANSACTION:
704 {
705 tree abort_label = gimple_transaction_label (last);
706 if (abort_label)
707 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
708 fallthru = true;
709 }
710 break;
711
712 default:
713 gcc_assert (!stmt_ends_bb_p (last));
714 fallthru = true;
715 }
716 }
717 else
718 fallthru = true;
719
720 if (fallthru)
721 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
722 }
723
724 if (root_omp_region)
725 free_omp_regions ();
726
727 /* Fold COND_EXPR_COND of each COND_EXPR. */
728 fold_cond_expr_cond ();
729 }
730
731 /* Find the next available discriminator value for LOCUS. The
732 discriminator distinguishes among several basic blocks that
733 share a common locus, allowing for more accurate sample-based
734 profiling. */
735
736 static int
737 next_discriminator_for_locus (location_t locus)
738 {
739 struct locus_discrim_map item;
740 struct locus_discrim_map **slot;
741
742 item.locus = locus;
743 item.discriminator = 0;
744 slot = discriminator_per_locus.find_slot_with_hash (
745 &item, LOCATION_LINE (locus), INSERT);
746 gcc_assert (slot);
747 if (*slot == HTAB_EMPTY_ENTRY)
748 {
749 *slot = XNEW (struct locus_discrim_map);
750 gcc_assert (*slot);
751 (*slot)->locus = locus;
752 (*slot)->discriminator = 0;
753 }
754 (*slot)->discriminator++;
755 return (*slot)->discriminator;
756 }
757
758 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
759
760 static bool
761 same_line_p (location_t locus1, location_t locus2)
762 {
763 expanded_location from, to;
764
765 if (locus1 == locus2)
766 return true;
767
768 from = expand_location (locus1);
769 to = expand_location (locus2);
770
771 if (from.line != to.line)
772 return false;
773 if (from.file == to.file)
774 return true;
775 return (from.file != NULL
776 && to.file != NULL
777 && filename_cmp (from.file, to.file) == 0);
778 }
779
780 /* Assign discriminators to each basic block. */
781
782 static void
783 assign_discriminators (void)
784 {
785 basic_block bb;
786
787 FOR_EACH_BB (bb)
788 {
789 edge e;
790 edge_iterator ei;
791 gimple last = last_stmt (bb);
792 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
793
794 if (locus == UNKNOWN_LOCATION)
795 continue;
796
797 FOR_EACH_EDGE (e, ei, bb->succs)
798 {
799 gimple first = first_non_label_stmt (e->dest);
800 gimple last = last_stmt (e->dest);
801 if ((first && same_line_p (locus, gimple_location (first)))
802 || (last && same_line_p (locus, gimple_location (last))))
803 {
804 if (e->dest->discriminator != 0 && bb->discriminator == 0)
805 bb->discriminator = next_discriminator_for_locus (locus);
806 else
807 e->dest->discriminator = next_discriminator_for_locus (locus);
808 }
809 }
810 }
811 }
812
813 /* Create the edges for a GIMPLE_COND starting at block BB. */
814
815 static void
816 make_cond_expr_edges (basic_block bb)
817 {
818 gimple entry = last_stmt (bb);
819 gimple then_stmt, else_stmt;
820 basic_block then_bb, else_bb;
821 tree then_label, else_label;
822 edge e;
823
824 gcc_assert (entry);
825 gcc_assert (gimple_code (entry) == GIMPLE_COND);
826
827 /* Entry basic blocks for each component. */
828 then_label = gimple_cond_true_label (entry);
829 else_label = gimple_cond_false_label (entry);
830 then_bb = label_to_block (then_label);
831 else_bb = label_to_block (else_label);
832 then_stmt = first_stmt (then_bb);
833 else_stmt = first_stmt (else_bb);
834
835 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
836 e->goto_locus = gimple_location (then_stmt);
837 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
838 if (e)
839 e->goto_locus = gimple_location (else_stmt);
840
841 /* We do not need the labels anymore. */
842 gimple_cond_set_true_label (entry, NULL_TREE);
843 gimple_cond_set_false_label (entry, NULL_TREE);
844 }
845
846
847 /* Called for each element in the hash table (P) as we delete the
848 edge to cases hash table.
849
850 Clear all the TREE_CHAINs to prevent problems with copying of
851 SWITCH_EXPRs and structure sharing rules, then free the hash table
852 element. */
853
854 static bool
855 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
856 void *data ATTRIBUTE_UNUSED)
857 {
858 tree t, next;
859
860 for (t = (tree) *value; t; t = next)
861 {
862 next = CASE_CHAIN (t);
863 CASE_CHAIN (t) = NULL;
864 }
865
866 *value = NULL;
867 return true;
868 }
869
870 /* Start recording information mapping edges to case labels. */
871
872 void
873 start_recording_case_labels (void)
874 {
875 gcc_assert (edge_to_cases == NULL);
876 edge_to_cases = pointer_map_create ();
877 touched_switch_bbs = BITMAP_ALLOC (NULL);
878 }
879
880 /* Return nonzero if we are recording information for case labels. */
881
882 static bool
883 recording_case_labels_p (void)
884 {
885 return (edge_to_cases != NULL);
886 }
887
888 /* Stop recording information mapping edges to case labels and
889 remove any information we have recorded. */
890 void
891 end_recording_case_labels (void)
892 {
893 bitmap_iterator bi;
894 unsigned i;
895 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
896 pointer_map_destroy (edge_to_cases);
897 edge_to_cases = NULL;
898 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
899 {
900 basic_block bb = BASIC_BLOCK (i);
901 if (bb)
902 {
903 gimple stmt = last_stmt (bb);
904 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
905 group_case_labels_stmt (stmt);
906 }
907 }
908 BITMAP_FREE (touched_switch_bbs);
909 }
910
911 /* If we are inside a {start,end}_recording_cases block, then return
912 a chain of CASE_LABEL_EXPRs from T which reference E.
913
914 Otherwise return NULL. */
915
916 static tree
917 get_cases_for_edge (edge e, gimple t)
918 {
919 void **slot;
920 size_t i, n;
921
922 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
923 chains available. Return NULL so the caller can detect this case. */
924 if (!recording_case_labels_p ())
925 return NULL;
926
927 slot = pointer_map_contains (edge_to_cases, e);
928 if (slot)
929 return (tree) *slot;
930
931 /* If we did not find E in the hash table, then this must be the first
932 time we have been queried for information about E & T. Add all the
933 elements from T to the hash table then perform the query again. */
934
935 n = gimple_switch_num_labels (t);
936 for (i = 0; i < n; i++)
937 {
938 tree elt = gimple_switch_label (t, i);
939 tree lab = CASE_LABEL (elt);
940 basic_block label_bb = label_to_block (lab);
941 edge this_edge = find_edge (e->src, label_bb);
942
943 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
944 a new chain. */
945 slot = pointer_map_insert (edge_to_cases, this_edge);
946 CASE_CHAIN (elt) = (tree) *slot;
947 *slot = elt;
948 }
949
950 return (tree) *pointer_map_contains (edge_to_cases, e);
951 }
952
953 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
954
955 static void
956 make_gimple_switch_edges (basic_block bb)
957 {
958 gimple entry = last_stmt (bb);
959 size_t i, n;
960
961 n = gimple_switch_num_labels (entry);
962
963 for (i = 0; i < n; ++i)
964 {
965 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
966 basic_block label_bb = label_to_block (lab);
967 make_edge (bb, label_bb, 0);
968 }
969 }
970
971
972 /* Return the basic block holding label DEST. */
973
974 basic_block
975 label_to_block_fn (struct function *ifun, tree dest)
976 {
977 int uid = LABEL_DECL_UID (dest);
978
979 /* We would die hard when faced by an undefined label. Emit a label to
980 the very first basic block. This will hopefully make even the dataflow
981 and undefined variable warnings quite right. */
982 if (seen_error () && uid < 0)
983 {
984 gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
985 gimple stmt;
986
987 stmt = gimple_build_label (dest);
988 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
989 uid = LABEL_DECL_UID (dest);
990 }
991 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
992 return NULL;
993 return (*ifun->cfg->x_label_to_block_map)[uid];
994 }
995
996 /* Create edges for an abnormal goto statement at block BB. If FOR_CALL
997 is true, the source statement is a CALL_EXPR instead of a GOTO_EXPR. */
998
999 void
1000 make_abnormal_goto_edges (basic_block bb, bool for_call)
1001 {
1002 basic_block target_bb;
1003 gimple_stmt_iterator gsi;
1004
1005 FOR_EACH_BB (target_bb)
1006 {
1007 for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
1008 {
1009 gimple label_stmt = gsi_stmt (gsi);
1010 tree target;
1011
1012 if (gimple_code (label_stmt) != GIMPLE_LABEL)
1013 break;
1014
1015 target = gimple_label_label (label_stmt);
1016
1017 /* Make an edge to every label block that has been marked as a
1018 potential target for a computed goto or a non-local goto. */
1019 if ((FORCED_LABEL (target) && !for_call)
1020 || (DECL_NONLOCAL (target) && for_call))
1021 {
1022 make_edge (bb, target_bb, EDGE_ABNORMAL);
1023 break;
1024 }
1025 }
1026 if (!gsi_end_p (gsi)
1027 && is_gimple_debug (gsi_stmt (gsi)))
1028 gsi_next_nondebug (&gsi);
1029 if (!gsi_end_p (gsi))
1030 {
1031 /* Make an edge to every setjmp-like call. */
1032 gimple call_stmt = gsi_stmt (gsi);
1033 if (is_gimple_call (call_stmt)
1034 && (gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE))
1035 make_edge (bb, target_bb, EDGE_ABNORMAL);
1036 }
1037 }
1038 }
1039
1040 /* Create edges for a goto statement at block BB. */
1041
1042 static void
1043 make_goto_expr_edges (basic_block bb)
1044 {
1045 gimple_stmt_iterator last = gsi_last_bb (bb);
1046 gimple goto_t = gsi_stmt (last);
1047
1048 /* A simple GOTO creates normal edges. */
1049 if (simple_goto_p (goto_t))
1050 {
1051 tree dest = gimple_goto_dest (goto_t);
1052 basic_block label_bb = label_to_block (dest);
1053 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1054 e->goto_locus = gimple_location (goto_t);
1055 gsi_remove (&last, true);
1056 return;
1057 }
1058
1059 /* A computed GOTO creates abnormal edges. */
1060 make_abnormal_goto_edges (bb, false);
1061 }
1062
1063 /* Create edges for an asm statement with labels at block BB. */
1064
1065 static void
1066 make_gimple_asm_edges (basic_block bb)
1067 {
1068 gimple stmt = last_stmt (bb);
1069 int i, n = gimple_asm_nlabels (stmt);
1070
1071 for (i = 0; i < n; ++i)
1072 {
1073 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1074 basic_block label_bb = label_to_block (label);
1075 make_edge (bb, label_bb, 0);
1076 }
1077 }
1078
1079 /*---------------------------------------------------------------------------
1080 Flowgraph analysis
1081 ---------------------------------------------------------------------------*/
1082
1083 /* Cleanup useless labels in basic blocks. This is something we wish
1084 to do early because it allows us to group case labels before creating
1085 the edges for the CFG, and it speeds up block statement iterators in
1086 all passes later on.
1087 We rerun this pass after CFG is created, to get rid of the labels that
1088 are no longer referenced. After then we do not run it any more, since
1089 (almost) no new labels should be created. */
1090
/* A map from basic block index to the leading label of that block.
   Allocated with XCNEWVEC and freed again by cleanup_dead_labels;
   main_block_label reads and updates entries while the pass runs.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
1100
1101 /* Given LABEL return the first label in the same basic block. */
1102
1103 static tree
1104 main_block_label (tree label)
1105 {
1106 basic_block bb = label_to_block (label);
1107 tree main_label = label_for_bb[bb->index].label;
1108
1109 /* label_to_block possibly inserted undefined label into the chain. */
1110 if (!main_label)
1111 {
1112 label_for_bb[bb->index].label = label;
1113 main_label = label;
1114 }
1115
1116 label_for_bb[bb->index].used = true;
1117 return main_label;
1118 }
1119
/* Clean up redundant labels within the exception tree.  Every label
   recorded in the EH data (landing pads, catch handlers, allowed-
   exceptions regions) is redirected to the leading label of its basic
   block via main_block_label.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  /* Nothing to do if the function has no EH data at all.  */
  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label and keep the
     EH_LANDING_PAD_NR markers consistent with the new label.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels held directly in the region tree.  CLEANUP and
     MUST_NOT_THROW regions carry no label of their own.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
1170
1171
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.
   Uses the file-scope LABEL_FOR_BB map, which is allocated on entry
   and freed before returning.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block);

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB (bb)
    {
      gimple stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  /* Both arms of the conditional may still carry labels at this
	     stage; redirect each independently.  */
	  label = gimple_cond_true_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_true_label (stmt, new_label);
	    }

	  label = gimple_cond_false_label (stmt);
	  if (label)
	    {
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_cond_set_false_label (stmt, new_label);
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    size_t i, n = gimple_switch_num_labels (stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    /* Redirect the label operands of an asm goto.  */
	    int i, n = gimple_asm_nlabels (stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      label = gimple_goto_dest (stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    tree label = gimple_transaction_label (stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  gimple stmt = gsi_stmt (i);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);

	  /* Keep the leading label plus anything user-visible or
	     address-taken; everything else is redundant by now.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1347
1348 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1349 the ones jumping to the same label.
1350 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1351
1352 void
1353 group_case_labels_stmt (gimple stmt)
1354 {
1355 int old_size = gimple_switch_num_labels (stmt);
1356 int i, j, new_size = old_size;
1357 basic_block default_bb = NULL;
1358
1359 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1360
1361 /* Look for possible opportunities to merge cases. */
1362 i = 1;
1363 while (i < old_size)
1364 {
1365 tree base_case, base_high;
1366 basic_block base_bb;
1367
1368 base_case = gimple_switch_label (stmt, i);
1369
1370 gcc_assert (base_case);
1371 base_bb = label_to_block (CASE_LABEL (base_case));
1372
1373 /* Discard cases that have the same destination as the
1374 default case. */
1375 if (base_bb == default_bb)
1376 {
1377 gimple_switch_set_label (stmt, i, NULL_TREE);
1378 i++;
1379 new_size--;
1380 continue;
1381 }
1382
1383 base_high = CASE_HIGH (base_case)
1384 ? CASE_HIGH (base_case)
1385 : CASE_LOW (base_case);
1386 i++;
1387
1388 /* Try to merge case labels. Break out when we reach the end
1389 of the label vector or when we cannot merge the next case
1390 label with the current one. */
1391 while (i < old_size)
1392 {
1393 tree merge_case = gimple_switch_label (stmt, i);
1394 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1395 double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
1396
1397 /* Merge the cases if they jump to the same place,
1398 and their ranges are consecutive. */
1399 if (merge_bb == base_bb
1400 && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
1401 {
1402 base_high = CASE_HIGH (merge_case) ?
1403 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1404 CASE_HIGH (base_case) = base_high;
1405 gimple_switch_set_label (stmt, i, NULL_TREE);
1406 new_size--;
1407 i++;
1408 }
1409 else
1410 break;
1411 }
1412 }
1413
1414 /* Compress the case labels in the label vector, and adjust the
1415 length of the vector. */
1416 for (i = 0, j = 0; i < new_size; i++)
1417 {
1418 while (! gimple_switch_label (stmt, j))
1419 j++;
1420 gimple_switch_set_label (stmt, i,
1421 gimple_switch_label (stmt, j++));
1422 }
1423
1424 gcc_assert (new_size <= old_size);
1425 gimple_switch_set_num_labels (stmt, new_size);
1426 }
1427
1428 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1429 and scan the sorted vector of cases. Combine the ones jumping to the
1430 same label. */
1431
1432 void
1433 group_case_labels (void)
1434 {
1435 basic_block bb;
1436
1437 FOR_EACH_BB (bb)
1438 {
1439 gimple stmt = last_stmt (bb);
1440 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1441 group_case_labels_stmt (stmt);
1442 }
1443 }
1444
/* Checks whether we can merge block B into block A.  A merge requires
   that A has exactly one (non-complex) successor edge leading to B,
   that B has exactly one predecessor, and that nothing in either block
   forces a block boundary between them.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple stmt;
  gimple_stmt_iterator gsi;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (b == EXIT_BLOCK_PTR)
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt
      && gimple_code (stmt) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (stmt)))
    return false;

  /* Examine the labels at the beginning of B.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree lab;
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      lab = gimple_label_label (stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect the loop latches.  */
  if (current_loops && b->loop_father->latch == b)
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The locus
     survives the merge only if an adjacent statement already carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1528
/* Replaces all uses of NAME by VAL.  NAME must be an SSA name; on
   return it has zero remaining uses (asserted).  Statements whose
   operands change are folded and re-validated against EH, and blocks
   whose last statement changed are recorded in
   CFGCLEANUP_ALTERED_BBS when that bitmap is live.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
        {
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL)
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
        }

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple orig_stmt = stmt;
	  size_t i;

	  /* Mark the block if we changed the last stmt in it.  */
	  if (cfgcleanup_altered_bbs
	      && stmt_ends_bb_p (stmt))
	    bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

	  /* FIXME. It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; track the current one.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  /* If the substitution made the statement non-throwing, the
	     now-dead EH edges must be purged.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;
      loop_iterator li;

      FOR_EACH_LOOP (li, loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
1609
/* Merge block B into block A.  The caller must have established (see
   gimple_can_merge_blocks_p) that A's single successor is B over a
   fallthru edge and B has a single predecessor; both facts are
   asserted below after B is moved to follow A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi, psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple dbg = gimple_build_debug_bind (label,
						    integer_zero_node,
						    stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* If the label was an EH landing pad, the pad is gone now.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
1737
1738
1739 /* Return the one of two successors of BB that is not reachable by a
1740 complex edge, if there is one. Else, return BB. We use
1741 this in optimizations that use post-dominators for their heuristics,
1742 to catch the cases in C++ where function calls are involved. */
1743
1744 basic_block
1745 single_noncomplex_succ (basic_block bb)
1746 {
1747 edge e0, e1;
1748 if (EDGE_COUNT (bb->succs) != 2)
1749 return bb;
1750
1751 e0 = EDGE_SUCC (bb, 0);
1752 e1 = EDGE_SUCC (bb, 1);
1753 if (e0->flags & EDGE_COMPLEX)
1754 return e1->dest;
1755 if (e1->flags & EDGE_COMPLEX)
1756 return e0->dest;
1757
1758 return bb;
1759 }
1760
1761 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1762
1763 void
1764 notice_special_calls (gimple call)
1765 {
1766 int flags = gimple_call_flags (call);
1767
1768 if (flags & ECF_MAY_BE_ALLOCA)
1769 cfun->calls_alloca = true;
1770 if (flags & ECF_RETURNS_TWICE)
1771 cfun->calls_setjmp = true;
1772 }
1773
1774
1775 /* Clear flags set by notice_special_calls. Used by dead code removal
1776 to update the flags. */
1777
1778 void
1779 clear_special_calls (void)
1780 {
1781 cfun->calls_alloca = false;
1782 cfun->calls_setjmp = false;
1783 }
1784
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  Removing an edge shifts the
     remaining ones down, so always strip index 0 until none remain.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
1798
1799
/* Remove statements of basic block BB.  Forced and non-local labels
   that may still be referenced are rescued into the preceding block
   instead of being deleted.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple stmt = gsi_stmt (i);
	  if (gimple_code (stmt) == GIMPLE_LABEL
	      && (FORCED_LABEL (gimple_label_label (stmt))
		  || DECL_NONLOCAL (gimple_label_label (stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (stmt)) = 1;
		}

	      /* Move the label into the block that precedes BB so any
		 remaining references stay valid.  */
	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  /* Removal leaves the iterator at the following statement; step
	     back to continue the backwards walk, or restart from the new
	     last statement when we fell off the end.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
1882
1883
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  Dispatches
   on the control statement kind; a block not ending in a control
   statement is a caller error (asserted).  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  /* Only an invariant (constant) predicate selects a unique edge.  */
  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
1926
1927 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
1928 statement, determine which of the outgoing edges will be taken out of the
1929 block. Return NULL if either edge may be taken. */
1930
1931 static edge
1932 find_taken_edge_computed_goto (basic_block bb, tree val)
1933 {
1934 basic_block dest;
1935 edge e = NULL;
1936
1937 dest = label_to_block (val);
1938 if (dest)
1939 {
1940 e = find_edge (bb, dest);
1941 gcc_assert (e != NULL);
1942 }
1943
1944 return e;
1945 }
1946
1947 /* Given a constant value VAL and the entry block BB to a COND_EXPR
1948 statement, determine which of the two edges will be taken out of the
1949 block. Return NULL if either edge may be taken. */
1950
1951 static edge
1952 find_taken_edge_cond_expr (basic_block bb, tree val)
1953 {
1954 edge true_edge, false_edge;
1955
1956 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1957
1958 gcc_assert (TREE_CODE (val) == INTEGER_CST);
1959 return (integer_zerop (val) ? false_edge : true_edge);
1960 }
1961
1962 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
1963 statement, determine which edge will be taken out of the block. Return
1964 NULL if any edge may be taken. */
1965
1966 static edge
1967 find_taken_edge_switch_expr (basic_block bb, tree val)
1968 {
1969 basic_block dest_bb;
1970 edge e;
1971 gimple switch_stmt;
1972 tree taken_case;
1973
1974 switch_stmt = last_stmt (bb);
1975 taken_case = find_case_label_for_value (switch_stmt, val);
1976 dest_bb = label_to_block (CASE_LABEL (taken_case));
1977
1978 e = find_edge (bb, dest_bb);
1979 gcc_assert (e);
1980 return e;
1981 }
1982
1983
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  Falls
   back to the default label when no case matches.  */

static tree
find_case_label_for_value (gimple switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2024
2025
/* Dump a basic block on stderr.  Intended for interactive use from a
   debugger; includes virtual operands, memory symbols and block notes.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
2033
2034
2035 /* Dump basic block with index N on stderr. */
2036
2037 basic_block
2038 gimple_debug_bb_n (int n)
2039 {
2040 gimple_debug_bb (BASIC_BLOCK (n));
2041 return BASIC_BLOCK (n);
2042 }
2043
2044
/* Dump the CFG on stderr.

   FLAGS are the same used by the tree dumping functions
   (see TDF_* in dumpfile.h).  Debugger-friendly wrapper around
   gimple_dump_cfg.  */

void
gimple_debug_cfg (int flags)
{
  gimple_dump_cfg (stderr, flags);
}
2055
2056
/* Dump the program showing basic block boundaries on the given FILE.

   FLAGS are the same used by the tree dumping functions (see TDF_* in
   tree.h).  TDF_DETAILS adds a header and a brief CFG summary;
   TDF_STATS adds memory statistics.  */

void
gimple_dump_cfg (FILE *file, int flags)
{
  if (flags & TDF_DETAILS)
    {
      dump_function_header (file, current_function_decl, flags);
      fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
	       n_basic_blocks, n_edges, last_basic_block);

      brief_dump_cfg (file, flags | TDF_COMMENT);
      fprintf (file, "\n");
    }

  if (flags & TDF_STATS)
    dump_cfg_stats (file);

  /* Always end with the function body annotated with block markers.  */
  dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
}
2080
2081
/* Dump CFG statistics on FILE: instance counts and memory used by
   basic blocks and edges, plus the label-coalescing counter.  The
   high-water mark of merged labels persists across calls.  */

void
dump_cfg_stats (FILE *file)
{
  static long max_num_merged_labels = 0;
  unsigned long size, total = 0;
  long num_edges;
  basic_block bb;
  const char * const fmt_str = "%-30s%-13s%12s\n";
  const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
  const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
  const char * const fmt_str_3 = "%-43s%11lu%c\n";
  const char *funcname = current_function_name ();

  fprintf (file, "\nCFG Statistics for %s\n\n", funcname);

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str, "", "  Number of  ", "Memory");
  fprintf (file, fmt_str, "", "  instances  ", "used ");
  fprintf (file, "---------------------------------------------------------\n");

  size = n_basic_blocks * sizeof (struct basic_block_def);
  total += size;
  fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks,
	   SCALE (size), LABEL (size));

  /* Count edges by summing successor counts over all blocks.  */
  num_edges = 0;
  FOR_EACH_BB (bb)
    num_edges += EDGE_COUNT (bb->succs);
  size = num_edges * sizeof (struct edge_def);
  total += size;
  fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));

  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
	   LABEL (total));
  fprintf (file, "---------------------------------------------------------\n");
  fprintf (file, "\n");

  if (cfg_stats.num_merged_labels > max_num_merged_labels)
    max_num_merged_labels = cfg_stats.num_merged_labels;

  fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
	   cfg_stats.num_merged_labels, max_num_merged_labels);

  fprintf (file, "\n");
}
2130
2131
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2140
2141 /*---------------------------------------------------------------------------
2142 Miscellaneous helpers
2143 ---------------------------------------------------------------------------*/
2144
2145 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2146 flow. Transfers of control flow associated with EH are excluded. */
2147
2148 static bool
2149 call_can_make_abnormal_goto (gimple t)
2150 {
2151 /* If the function has no non-local labels, then a call cannot make an
2152 abnormal transfer of control. */
2153 if (!cfun->has_nonlocal_label
2154 && !cfun->calls_setjmp)
2155 return false;
2156
2157 /* Likewise if the call has no side effects. */
2158 if (!gimple_has_side_effects (t))
2159 return false;
2160
2161 /* Likewise if the called function is leaf. */
2162 if (gimple_call_flags (t) & ECF_LEAF)
2163 return false;
2164
2165 return true;
2166 }
2167
2168
2169 /* Return true if T can make an abnormal transfer of control flow.
2170 Transfers of control flow associated with EH are excluded. */
2171
2172 bool
2173 stmt_can_make_abnormal_goto (gimple t)
2174 {
2175 if (computed_goto_p (t))
2176 return true;
2177 if (is_gimple_call (t))
2178 return call_can_make_abnormal_goto (t);
2179 return false;
2180 }
2181
2182
2183 /* Return true if T represents a stmt that always transfers control. */
2184
2185 bool
2186 is_ctrl_stmt (gimple t)
2187 {
2188 switch (gimple_code (t))
2189 {
2190 case GIMPLE_COND:
2191 case GIMPLE_SWITCH:
2192 case GIMPLE_GOTO:
2193 case GIMPLE_RETURN:
2194 case GIMPLE_RESX:
2195 return true;
2196 default:
2197 return false;
2198 }
2199 }
2200
2201
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  Unlike is_ctrl_stmt,
   these statements may also fall through to the next statement.  */

bool
is_ctrl_altering_stmt (gimple t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      {
	int flags = gimple_call_flags (t);

	/* A call alters control flow if it can make an abnormal goto.  */
	if (call_can_make_abnormal_goto (t))
	  return true;

	/* A call also alters control flow if it does not return.  */
	if (flags & ECF_NORETURN)
	  return true;

	/* TM ending statements have backedges out of the transaction.
	   Return true so we split the basic block containing them.
	   Note that the TM_BUILTIN test is merely an optimization.  */
	if ((flags & ECF_TM_BUILTIN)
	    && is_tm_ending_fndecl (gimple_call_fndecl (t)))
	  return true;

	/* BUILT_IN_RETURN call is same as return statement.  */
	if (gimple_call_builtin_p (t, BUILT_IN_RETURN))
	  return true;
      }
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm goto with label operands can branch.  */
      if (gimple_asm_nlabels (t) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (t);
}
2263
2264
2265 /* Return true if T is a simple local goto. */
2266
2267 bool
2268 simple_goto_p (gimple t)
2269 {
2270 return (gimple_code (t) == GIMPLE_GOTO
2271 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2272 }
2273
2274
2275 /* Return true if STMT should start a new basic block. PREV_STMT is
2276 the statement preceding STMT. It is used when STMT is a label or a
2277 case label. Labels should only start a new basic block if their
2278 previous statement wasn't a label. Otherwise, sequence of labels
2279 would generate unnecessary basic blocks that only contain a single
2280 label. */
2281
2282 static inline bool
2283 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2284 {
2285 if (stmt == NULL)
2286 return false;
2287
2288 /* Labels start a new basic block only if the preceding statement
2289 wasn't a label of the same type. This prevents the creation of
2290 consecutive blocks that have nothing but a single label. */
2291 if (gimple_code (stmt) == GIMPLE_LABEL)
2292 {
2293 /* Nonlocal and computed GOTO targets always start a new block. */
2294 if (DECL_NONLOCAL (gimple_label_label (stmt))
2295 || FORCED_LABEL (gimple_label_label (stmt)))
2296 return true;
2297
2298 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2299 {
2300 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2301 return true;
2302
2303 cfg_stats.num_merged_labels++;
2304 return false;
2305 }
2306 else
2307 return true;
2308 }
2309 else if (gimple_code (stmt) == GIMPLE_CALL
2310 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2311 /* setjmp acts similar to a nonlocal GOTO target and thus should
2312 start a new block. */
2313 return true;
2314
2315 return false;
2316 }
2317
2318
2319 /* Return true if T should end a basic block. */
2320
2321 bool
2322 stmt_ends_bb_p (gimple t)
2323 {
2324 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2325 }
2326
2327 /* Remove block annotations and other data structures. */
2328
void
delete_tree_cfg_annotations (void)
{
  /* Release the label to basic-block map.  */
  vec_free (label_to_block_map);
}
2334
2335
2336 /* Return the first statement in basic block BB. */
2337
2338 gimple
2339 first_stmt (basic_block bb)
2340 {
2341 gimple_stmt_iterator i = gsi_start_bb (bb);
2342 gimple stmt = NULL;
2343
2344 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2345 {
2346 gsi_next (&i);
2347 stmt = NULL;
2348 }
2349 return stmt;
2350 }
2351
2352 /* Return the first non-label statement in basic block BB. */
2353
2354 static gimple
2355 first_non_label_stmt (basic_block bb)
2356 {
2357 gimple_stmt_iterator i = gsi_start_bb (bb);
2358 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2359 gsi_next (&i);
2360 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2361 }
2362
2363 /* Return the last statement in basic block BB. */
2364
2365 gimple
2366 last_stmt (basic_block bb)
2367 {
2368 gimple_stmt_iterator i = gsi_last_bb (bb);
2369 gimple stmt = NULL;
2370
2371 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2372 {
2373 gsi_prev (&i);
2374 stmt = NULL;
2375 }
2376 return stmt;
2377 }
2378
2379 /* Return the last statement of an otherwise empty block. Return NULL
2380 if the block is totally empty, or if it contains more than one
2381 statement. */
2382
2383 gimple
2384 last_and_only_stmt (basic_block bb)
2385 {
2386 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2387 gimple last, prev;
2388
2389 if (gsi_end_p (i))
2390 return NULL;
2391
2392 last = gsi_stmt (i);
2393 gsi_prev_nondebug (&i);
2394 if (gsi_end_p (i))
2395 return last;
2396
2397 /* Empty statements should no longer appear in the instruction stream.
2398 Everything that might have appeared before should be deleted by
2399 remove_useless_stmts, and the optimizers should just gsi_remove
2400 instead of smashing with build_empty_stmt.
2401
2402 Thus the only thing that should appear here in a block containing
2403 one executable statement is a label. */
2404 prev = gsi_stmt (i);
2405 if (gimple_code (prev) == GIMPLE_LABEL)
2406 return last;
2407 else
2408 return NULL;
2409 }
2410
2411 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2412
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map_vector *v;
  edge_var_map *vm;
  int i;
  gimple_stmt_iterator phis;

  /* Fetch the PHI result/def pairs recorded for OLD_EDGE by the edge
     redirection machinery; nothing to do if none were recorded.  */
  v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the recorded pairs and the PHI nodes of NEW_EDGE's destination
     in lockstep; the assert verifies they still correspond one-to-one.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gimple phi = gsi_stmt (phis);
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      /* Re-add the queued argument, preserving its source location.  */
      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* Drop the map entries for OLD_EDGE now that they are reinstalled.  */
  redirect_edge_var_map_clear (old_edge);
}
2440
2441 /* Returns the basic block after which the new basic block created
2442 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2443 near its "logical" location. This is of most help to humans looking
2444 at debugging dumps. */
2445
2446 static basic_block
2447 split_edge_bb_loc (edge edge_in)
2448 {
2449 basic_block dest = edge_in->dest;
2450 basic_block dest_prev = dest->prev_bb;
2451
2452 if (dest_prev)
2453 {
2454 edge e = find_edge (dest_prev, dest);
2455 if (e && !(e->flags & EDGE_COMPLEX))
2456 return edge_in->src;
2457 }
2458 return dest_prev;
2459 }
2460
2461 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2462 Abort on abnormal edges. */
2463
static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* The new block inherits its execution profile from the split edge.  */
  new_bb->frequency = EDGE_FREQUENCY (edge_in);
  new_bb->count = edge_in->count;
  /* The new block unconditionally falls through to the old destination.  */
  new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = edge_in->count;

  /* Redirect EDGE_IN to the new block; reinstall_phi_args then moves
     the PHI arguments recorded for E over to NEW_EDGE.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2490
2491
2492 /* Verify properties of the address expression T with base object BASE. */
2493
2494 static tree
2495 verify_address (tree t, tree base)
2496 {
2497 bool old_constant;
2498 bool old_side_effects;
2499 bool new_constant;
2500 bool new_side_effects;
2501
2502 old_constant = TREE_CONSTANT (t);
2503 old_side_effects = TREE_SIDE_EFFECTS (t);
2504
2505 recompute_tree_invariant_for_addr_expr (t);
2506 new_side_effects = TREE_SIDE_EFFECTS (t);
2507 new_constant = TREE_CONSTANT (t);
2508
2509 if (old_constant != new_constant)
2510 {
2511 error ("constant not recomputed when ADDR_EXPR changed");
2512 return t;
2513 }
2514 if (old_side_effects != new_side_effects)
2515 {
2516 error ("side effects not recomputed when ADDR_EXPR changed");
2517 return t;
2518 }
2519
2520 if (!(TREE_CODE (base) == VAR_DECL
2521 || TREE_CODE (base) == PARM_DECL
2522 || TREE_CODE (base) == RESULT_DECL))
2523 return NULL_TREE;
2524
2525 if (DECL_GIMPLE_REG_P (base))
2526 {
2527 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2528 return base;
2529 }
2530
2531 return NULL_TREE;
2532 }
2533
2534 /* Callback for walk_tree, check that all elements with address taken are
2535 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2536 inside a PHI node. */
2537
static tree
verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp, x;

  /* Types themselves contain nothing to verify here.  */
  if (TYPE_P (t))
    *walk_subtrees = 0;

  /* Check operand N for being valid GIMPLE and give error MSG if not.  */
#define CHECK_OP(N, MSG) \
  do { if (!is_gimple_val (TREE_OPERAND (t, N)))		\
       { error (MSG); return TREE_OPERAND (t, N); }} while (0)

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      if (SSA_NAME_IN_FREE_LIST (t))
	{
	  error ("SSA name in freelist but still referenced");
	  return *tp;
	}
      break;

    case INDIRECT_REF:
      /* INDIRECT_REF was replaced by MEM_REF in the gimple IL.  */
      error ("INDIRECT_REF in gimple IL");
      return t;

    case MEM_REF:
      x = TREE_OPERAND (t, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (x))
	  || !is_gimple_mem_ref_addr (x))
	{
	  error ("invalid first operand of MEM_REF");
	  return x;
	}
      /* The offset operand must be a pointer-typed integer constant.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid offset operand of MEM_REF");
	  return TREE_OPERAND (t, 1);
	}
      if (TREE_CODE (x) == ADDR_EXPR
	  && (x = verify_address (x, TREE_OPERAND (x, 0))))
	return x;
      *walk_subtrees = 0;
      break;

    case ASSERT_EXPR:
      x = fold (ASSERT_EXPR_COND (t));
      if (x == boolean_false_node)
	{
	  error ("ASSERT_EXPR with an always-false condition");
	  return *tp;
	}
      break;

    case MODIFY_EXPR:
      error ("MODIFY_EXPR not expected while having tuples");
      return *tp;

    case ADDR_EXPR:
      {
	tree tem;

	gcc_assert (is_gimple_address (t));

	/* Skip any references (they will be checked when we recurse down the
	   tree) and ensure that any variable used as a prefix is marked
	   addressable.  */
	for (x = TREE_OPERAND (t, 0);
	     handled_component_p (x);
	     x = TREE_OPERAND (x, 0))
	  ;

	if ((tem = verify_address (t, x)))
	  return tem;

	if (!(TREE_CODE (x) == VAR_DECL
	      || TREE_CODE (x) == PARM_DECL
	      || TREE_CODE (x) == RESULT_DECL))
	  return NULL;

	if (!TREE_ADDRESSABLE (x))
	  {
	    error ("address taken, but ADDRESSABLE bit not set");
	    return x;
	  }

	break;
      }

    case COND_EXPR:
      x = COND_EXPR_COND (t);
      if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
	{
	  error ("non-integral used in condition");
	  return x;
	}
      if (!is_gimple_condexpr (x))
	{
	  error ("invalid conditional operand");
	  return x;
	}
      break;

    case NON_LVALUE_EXPR:
    case TRUTH_NOT_EXPR:
      /* These codes must not survive gimplification.  */
      gcc_unreachable ();

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
      CHECK_OP (0, "invalid operand to unary operator");
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case BIT_FIELD_REF:
      if (!is_gimple_reg_type (TREE_TYPE (t)))
	{
	  error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
	  return t;
	}

      if (TREE_CODE (t) == BIT_FIELD_REF)
	{
	  /* Operand 1 is the field size, operand 2 the bit position.  */
	  if (!host_integerp (TREE_OPERAND (t, 1), 1)
	      || !host_integerp (TREE_OPERAND (t, 2), 1))
	    {
	      error ("invalid position or size operand to BIT_FIELD_REF");
	      return t;
	    }
	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	      && (TYPE_PRECISION (TREE_TYPE (t))
		  != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
	    {
	      error ("integral result type precision does not match "
		     "field size of BIT_FIELD_REF");
	      return t;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
		       != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
	    {
	      error ("mode precision of non-integral result does not "
		     "match field size of BIT_FIELD_REF");
	      return t;
	    }
	}
      t = TREE_OPERAND (t, 0);

      /* Fall-through.  */
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
      /* We have a nest of references.  Verify that each of the operands
	 that determine where to reference is either a constant or a variable,
	 verify that the base is valid, and then show we've already checked
	 the subtrees.  */
      while (handled_component_p (t))
	{
	  if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
	    CHECK_OP (2, "invalid COMPONENT_REF offset operator");
	  else if (TREE_CODE (t) == ARRAY_REF
		   || TREE_CODE (t) == ARRAY_RANGE_REF)
	    {
	      CHECK_OP (1, "invalid array index");
	      if (TREE_OPERAND (t, 2))
		CHECK_OP (2, "invalid array lower bound");
	      if (TREE_OPERAND (t, 3))
		CHECK_OP (3, "invalid array stride");
	    }
	  else if (TREE_CODE (t) == BIT_FIELD_REF
		   || TREE_CODE (t) == REALPART_EXPR
		   || TREE_CODE (t) == IMAGPART_EXPR)
	    {
	      error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
		     "REALPART_EXPR");
	      return t;
	    }

	  t = TREE_OPERAND (t, 0);
	}

      if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
	{
	  error ("invalid reference prefix");
	  return t;
	}
      *walk_subtrees = 0;
      break;
    case PLUS_EXPR:
    case MINUS_EXPR:
      /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
	 POINTER_PLUS_EXPR.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)))
	{
	  error ("invalid operand to plus/minus, type is a pointer");
	  return t;
	}
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case POINTER_PLUS_EXPR:
      /* Check to make sure the first operand is a pointer or reference type. */
      if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
	{
	  error ("invalid operand to pointer plus, first operand is not a pointer");
	  return t;
	}
      /* Check to make sure the second operand is a ptrofftype.  */
      if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
	{
	  error ("invalid operand to pointer plus, second operand is not an "
		 "integer type of appropriate width");
	  return t;
	}
      /* FALLTHROUGH */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      CHECK_OP (0, "invalid operand to binary operator");
      CHECK_OP (1, "invalid operand to binary operator");
      break;

    case CONSTRUCTOR:
      /* Constant vector constructors need no per-element walking.  */
      if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	*walk_subtrees = 0;
      break;

    case CASE_LABEL_EXPR:
      /* CASE_CHAIN must have been cleared after CFG construction; see
	 the comment on the edge-to-cases hash table above.  */
      if (CASE_CHAIN (t))
	{
	  error ("invalid CASE_CHAIN");
	  return t;
	}
      break;

    default:
      break;
    }
  return NULL;

#undef CHECK_OP
}
2820
2821
2822 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2823 Returns true if there is an error, otherwise false. */
2824
2825 static bool
2826 verify_types_in_gimple_min_lval (tree expr)
2827 {
2828 tree op;
2829
2830 if (is_gimple_id (expr))
2831 return false;
2832
2833 if (TREE_CODE (expr) != TARGET_MEM_REF
2834 && TREE_CODE (expr) != MEM_REF)
2835 {
2836 error ("invalid expression for min lvalue");
2837 return true;
2838 }
2839
2840 /* TARGET_MEM_REFs are strange beasts. */
2841 if (TREE_CODE (expr) == TARGET_MEM_REF)
2842 return false;
2843
2844 op = TREE_OPERAND (expr, 0);
2845 if (!is_gimple_val (op))
2846 {
2847 error ("invalid operand in indirect reference");
2848 debug_generic_stmt (op);
2849 return true;
2850 }
2851 /* Memory references now generally can involve a value conversion. */
2852
2853 return false;
2854 }
2855
2856 /* Verify if EXPR is a valid GIMPLE reference expression. If
2857 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2858 if there is an error, otherwise false. */
2859
static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  /* Peel the component references one level at a time, checking each.  */
  while (handled_component_p (expr))
    {
      tree op = TREE_OPERAND (expr, 0);

      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  /* Index, lower bound and element size must all be gimple
	     values when present.  */
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to array reference");
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in array range reference");
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in real/imagpart reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* A COMPONENT_REF must have the type of the FIELD_DECL it
	 references (operand 1).  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("type mismatch in component reference");
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	  return true;
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of an SSA_NAME on the left hand side");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size");
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the innermost base of the reference chain.  */
  if (TREE_CODE (expr) == MEM_REF)
    {
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
	{
	  error ("invalid address operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
	{
	  error ("invalid address operand in TARGET_MEM_REF");
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in TARGET_MEM_REF");
	  debug_generic_stmt (expr);
	  return true;
	}
    }

  /* Invariant bases (e.g. constant addresses) are fine as rvalues.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
2989
2990 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
2991 list of pointer-to types that is trivially convertible to DEST. */
2992
2993 static bool
2994 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
2995 {
2996 tree src;
2997
2998 if (!TYPE_POINTER_TO (src_obj))
2999 return true;
3000
3001 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3002 if (useless_type_conversion_p (dest, src))
3003 return true;
3004
3005 return false;
3006 }
3007
3008 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3009 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3010
3011 static bool
3012 valid_fixed_convert_types_p (tree type1, tree type2)
3013 {
3014 return (FIXED_POINT_TYPE_P (type1)
3015 && (INTEGRAL_TYPE_P (type2)
3016 || SCALAR_FLOAT_TYPE_P (type2)
3017 || FIXED_POINT_TYPE_P (type2)));
3018 }
3019
3020 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3021 is a problem, otherwise false. */
3022
static bool
verify_gimple_call (gimple stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry no fn tree; regular calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* The callee must be a pointer to a function or method type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* A looping-const-or-pure function must also be marked pure or
     const.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  if (gimple_call_lhs (stmt)
      && (!is_gimple_lvalue (gimple_call_lhs (stmt))
	  || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
    {
      error ("LHS in noreturn call");
      return true;
    }

  fntype = gimple_call_fntype (stmt);
  if (fntype
      && gimple_call_lhs (stmt)
      && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
				     TREE_TYPE (fntype))
      /* ??? At least C++ misses conversions at assignments from
	 void * call results.
	 ??? Java is completely off.  Especially with functions
	 returning java.lang.Object.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, this should not be an indirect
     call, and the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt))
    {
      if (!gimple_call_fndecl (stmt))
	{
	  error ("static chain in indirect gimple call");
	  return true;
	}
      fn = TREE_OPERAND (fn, 0);

      if (!DECL_STATIC_CHAIN (fn))
	{
	  error ("static chain with function that doesn%'t use one");
	  return true;
	}
    }

  /* ??? The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  /* Register-typed arguments must be gimple values; aggregate-typed
     arguments must at least be lvalues.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3155
3156 /* Verifies the gimple comparison with the result type TYPE and
3157 the operands OP0 and OP1. */
3158
static bool
verify_gimple_comparison (tree type, tree op0, tree op1)
{
  tree op0_type = TREE_TYPE (op0);
  tree op1_type = TREE_TYPE (op1);

  if (!is_gimple_val (op0) || !is_gimple_val (op1))
    {
      error ("invalid operands in gimple comparison");
      return true;
    }

  /* For comparisons we do not have the operations type as the
     effective type the comparison is carried out in.  Instead
     we require that either the first operand is trivially
     convertible into the second, or the other way around.
     Because we special-case pointers to void we allow
     comparisons of pointers with the same mode as well.  */
  if (!useless_type_conversion_p (op0_type, op1_type)
      && !useless_type_conversion_p (op1_type, op0_type)
      && (!POINTER_TYPE_P (op0_type)
	  || !POINTER_TYPE_P (op1_type)
	  || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
    {
      error ("mismatching comparison operand types");
      debug_generic_expr (op0_type);
      debug_generic_expr (op1_type);
      return true;
    }

  /* The resulting type of a comparison may be an effective boolean type.  */
  if (INTEGRAL_TYPE_P (type)
      && (TREE_CODE (type) == BOOLEAN_TYPE
	  || TYPE_PRECISION (type) == 1))
    {
      /* A boolean result is only valid for scalar operands.  */
      if (TREE_CODE (op0_type) == VECTOR_TYPE
	  || TREE_CODE (op1_type) == VECTOR_TYPE)
	{
	  error ("vector comparison returning a boolean");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}
    }
  /* Or an integer vector type with the same size and element count
     as the comparison operand types.  */
  else if (TREE_CODE (type) == VECTOR_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
    {
      if (TREE_CODE (op0_type) != VECTOR_TYPE
	  || TREE_CODE (op1_type) != VECTOR_TYPE)
	{
	  error ("non-vector operands in vector comparison");
	  debug_generic_expr (op0_type);
	  debug_generic_expr (op1_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
	  || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
	      != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
	  /* The result of a vector comparison is of signed
	     integral type.  */
	  || TYPE_UNSIGNED (TREE_TYPE (type)))
	{
	  error ("invalid vector comparison resulting type");
	  debug_generic_expr (type);
	  return true;
	}
    }
  else
    {
      error ("bogus comparison result type");
      debug_generic_expr (type);
      return true;
    }

  return false;
}
3238
3239 /* Verify a gimple assignment statement STMT with an unary rhs.
3240 Returns true if anything is wrong. */
3241
static bool
verify_gimple_assign_unary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets where the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (sizetype))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers into different address spaces.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* At least one side must be a fixed-point type; see
	   valid_fixed_convert_types_p.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integer to float, either scalar or element-wise on vectors.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float to integer, either scalar or element-wise on vectors.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
      /* FIXME.  */
      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case NON_LVALUE_EXPR:
    case CONJ_EXPR:
      /* Checked by the no-conversion test below.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3392
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong (after emitting a diagnostic),
   otherwise false.  */

static bool
verify_gimple_assign_binary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  /* A binary operation always produces a register value.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  /* Both operands must be gimple values (SSA names or invariants).  */
  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* A complex value is built from two scalar integral or
	   floating-point parts.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in complex expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  The shift count (rhs2) is
	   integral, or a matching integer vector for vector shifts.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	/* Whole-vector shifts: rhs1 is a vector of any elementary
	   element type, rhs2 the (scalar or vector) shift amount.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
		 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
		 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		&& (TREE_CODE (rhs2_type) != VECTOR_TYPE
		    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	/* For shifting a vector of non-integral components we
	   only allow shifting by a constant multiple of the element size.  */
	if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    && (TREE_CODE (rhs2) != INTEGER_CST
		|| !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
					   TYPE_SIZE (TREE_TYPE (rhs1_type)))))
	  {
	    error ("non-element sized vector shift of floating point vector");
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* Scalar widening shift: the result must be at least twice as
	   wide as the shifted operand.
	   NOTE(review): the diagnostic below says "widening vector
	   shift" although this case handles the scalar form; the
	   wording appears copied from the vector case that follows.  */
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Vector widening shift: integer vectors with lhs elements at
	   least twice as wide as rhs1 elements.  */
	if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in widening vector shift expression");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	/* For vector additions, check the element types; pointers must
	   use POINTER_PLUS_EXPR instead of PLUS_EXPR/MINUS_EXPR.  */
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to vector valued plus");
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands to plus/minus");
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* pointer + sizetype offset; result type matches the pointer.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in pointer plus expression");
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth codes are lowered before gimplification; they must not
	 appear in the GIMPLE IL.  */
      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2);

    case WIDEN_MULT_EXPR:
      /* Result must be an integer at least twice as wide as the
	 (equal-precision) operands.  */
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
        return true;
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      /* FIXME.  */
      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    default:
      gcc_unreachable ();
    }

  /* Generic handling: all three types must agree.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
3666
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong (after emitting a diagnostic),
   otherwise false.  */

static bool
verify_gimple_assign_ternary (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* A ternary operation always produces a register value.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For (VEC_)COND_EXPR the first operand may be a condition
     expression; otherwise all operands must be gimple values.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* rhs1 * rhs2 +/- rhs3: the multiply operands agree and the
	 accumulator (rhs3) has the (wider) result type.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in widening multiply-accumulate expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case FMA_EXPR:
      /* Fused multiply-add: all operands share the result type.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in fused multiply-add expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case COND_EXPR:
    case VEC_COND_EXPR:
      /* Both selected values must match the result type; the condition
	 (rhs1) was checked above via is_gimple_condexpr.  */
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in conditional expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* Permute two input vectors under an integer mask vector: all
	 four types are vectors of the same element count, and the mask
	 elements are integers of the data element size.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
	  || TYPE_VECTOR_SUBPARTS (rhs2_type)
	     != TYPE_VECTOR_SUBPARTS (rhs3_type)
	  || TYPE_VECTOR_SUBPARTS (rhs3_type)
	     != TYPE_VECTOR_SUBPARTS (lhs_type))
	{
	  error ("vectors with different element number found "
		 "in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
	     != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
	{
	  error ("invalid mask type in vector permute expression");
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case DOT_PROD_EXPR:
    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
3809
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong (after emitting a diagnostic),
   otherwise false.  */

static bool
verify_gimple_assign_single (gimple stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  /* A plain copy must not change the type.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion at assignment");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* Clobbers may only target a declaration or a MEM_REF.  */
  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("non-decl/MEM_REF LHS in clobber statement");
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs))
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in unary expression");
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in address expression");
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return verify_types_in_gimple_reference (op, true);
      }

    /* tcc_reference */
    case INDIRECT_REF:
      /* Indirect refs are lowered to MEM_REF during gimplification.  */
      error ("INDIRECT_REF in gimple IL");
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* A store of register-type data must have a register rhs, so a
	 memory rhs here implies a register lhs.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* A register-typed memory store needs a register rhs.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid rhs for gimple memory store");
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (then the element count must be correct to cover the whole
	     outer vector and index must be NULL on all elements, or it is
	     a CONSTRUCTOR of scalar elements, where we as an exception allow
	     smaller number of elements (assuming zero filling) and
	     consecutive indexes as compared to NULL indexes (such
	     CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      /* The first element fixes the expected element type;
		 later elements are checked against it.  */
	      if (elt_t == NULL_TREE)
		{
		  elt_t = TREE_TYPE (elt_v);
		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
		    {
		      /* NOTE(review): this inner elt_t shadows the outer
			 one but is initialized to the same value, so the
			 shadowing is harmless here; it looks
			 unintentional though.  */
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      else if (CONSTRUCTOR_NELTS (rhs1)
			       * TYPE_VECTOR_SUBPARTS (elt_t)
			       != TYPE_VECTOR_SUBPARTS (rhs1_type))
			{
			  error ("incorrect number of vector CONSTRUCTOR"
				 " elements");
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector CONSTRUCTOR elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  else if (CONSTRUCTOR_NELTS (rhs1)
			   > TYPE_VECTOR_SUBPARTS (rhs1_type))
		    {
		      error ("incorrect number of vector CONSTRUCTOR elements");
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      /* Element indexes must be NULL, or (for scalar elements)
		 consecutive integers matching the position.  */
	      if (elt_i != NULL_TREE
		  && (TREE_CODE (elt_t) == VECTOR_TYPE
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector CONSTRUCTOR with non-NULL element index");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      return res;
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case WITH_SIZE_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
4009
4010 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4011 is a problem, otherwise false. */
4012
4013 static bool
4014 verify_gimple_assign (gimple stmt)
4015 {
4016 switch (gimple_assign_rhs_class (stmt))
4017 {
4018 case GIMPLE_SINGLE_RHS:
4019 return verify_gimple_assign_single (stmt);
4020
4021 case GIMPLE_UNARY_RHS:
4022 return verify_gimple_assign_unary (stmt);
4023
4024 case GIMPLE_BINARY_RHS:
4025 return verify_gimple_assign_binary (stmt);
4026
4027 case GIMPLE_TERNARY_RHS:
4028 return verify_gimple_assign_ternary (stmt);
4029
4030 default:
4031 gcc_unreachable ();
4032 }
4033 }
4034
/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_return (gimple stmt)
{
  tree op = gimple_return_retval (stmt);
  /* The declared return type of the current function.  */
  tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));

  /* We cannot test for present return values as we do not fix up missing
     return values from the original source.  */
  if (op == NULL)
    return false;

  if (!is_gimple_val (op)
      && TREE_CODE (op) != RESULT_DECL)
    {
      error ("invalid operand in return statement");
      debug_generic_stmt (op);
      return true;
    }

  /* For a result returned by invisible reference, the RESULT_DECL's
     type is a pointer to the actual return type.  Replace OP by its
     type so that the TREE_TYPE below strips that extra pointer level
     before comparing against RESTYPE.  */
  if ((TREE_CODE (op) == RESULT_DECL
       && DECL_BY_REFERENCE (op))
      || (TREE_CODE (op) == SSA_NAME
	  && SSA_NAME_VAR (op)
	  && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
	  && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
    op = TREE_TYPE (op);

  if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
    {
      error ("invalid conversion in return statement");
      debug_generic_stmt (restype);
      debug_generic_stmt (TREE_TYPE (op));
      return true;
    }

  return false;
}
4075
4076
4077 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4078 is a problem, otherwise false. */
4079
4080 static bool
4081 verify_gimple_goto (gimple stmt)
4082 {
4083 tree dest = gimple_goto_dest (stmt);
4084
4085 /* ??? We have two canonical forms of direct goto destinations, a
4086 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4087 if (TREE_CODE (dest) != LABEL_DECL
4088 && (!is_gimple_val (dest)
4089 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4090 {
4091 error ("goto destination is neither a label nor a pointer");
4092 return true;
4093 }
4094
4095 return false;
4096 }
4097
/* Verify the contents of a GIMPLE_SWITCH STMT.  Checks the index
   operand, the default label, and that all case labels are valid,
   consistently typed, non-overlapping and sorted.  Returns true when
   there is a problem, otherwise false.  */

static bool
verify_gimple_switch (gimple stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is the default case and must have no CASE_LOW/CASE_HIGH.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range case must have CASE_LOW strictly below CASE_HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All case labels must share the type of the first one.  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  /* First non-default label: record the case label type and
	     check it fits in the switch index type.  */
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Labels must be sorted and must not overlap.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4186
4187 /* Verify a gimple debug statement STMT.
4188 Returns true if anything is wrong. */
4189
4190 static bool
4191 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4192 {
4193 /* There isn't much that could be wrong in a gimple debug stmt. A
4194 gimple debug bind stmt, for example, maps a tree, that's usually
4195 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4196 component or member of an aggregate type, to another tree, that
4197 can be an arbitrary expression. These stmts expand into debug
4198 insns, and are converted to debug notes by var-tracking.c. */
4199 return false;
4200 }
4201
/* Verify a gimple label statement STMT.  Checks that the label decl
   belongs to the current function, that label_to_block_map agrees with
   the label's block, and that any EH landing-pad number round-trips.
   Returns true if anything is wrong.  */

static bool
verify_gimple_label (gimple stmt)
{
  tree decl = gimple_label_label (stmt);
  int uid;
  bool err = false;

  if (TREE_CODE (decl) != LABEL_DECL)
    return true;
  /* Nonlocal and forced labels may legitimately live in another
     function's context.  */
  if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
      && DECL_CONTEXT (decl) != current_function_decl)
    {
      error ("label's context is not the current function decl");
      err |= true;
    }

  /* With a CFG, the label's UID must map back to the block that
     contains this statement.  */
  uid = LABEL_DECL_UID (decl);
  if (cfun->cfg
      && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
    {
      error ("incorrect entry in label_to_block_map");
      err |= true;
    }

  /* A nonzero landing-pad number must name a landing pad whose
     post-landing-pad label is this very decl.  */
  uid = EH_LANDING_PAD_NR (decl);
  if (uid)
    {
      eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
      if (decl != lp->post_landing_pad)
	{
	  error ("incorrect setting of landing pad number");
	  err |= true;
	}
    }

  return err;
}
4242
/* Verify the GIMPLE statement STMT.  Dispatches on the statement code
   to the per-code checker.  Returns true if there is an error,
   otherwise false.  */

static bool
verify_gimple_stmt (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (stmt);

    case GIMPLE_LABEL:
      return verify_gimple_label (stmt);

    case GIMPLE_CALL:
      return verify_gimple_call (stmt);

    case GIMPLE_COND:
      /* The condition code must be a comparison.  */
      if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
	{
	  error ("invalid comparison code in gimple cond");
	  return true;
	}
      /* Each branch label is either absent or a LABEL_DECL.  */
      if (!(!gimple_cond_true_label (stmt)
	    || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
	  || !(!gimple_cond_false_label (stmt)
	       || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
	{
	  error ("invalid labels in gimple cond");
	  return true;
	}

      return verify_gimple_comparison (boolean_type_node,
				       gimple_cond_lhs (stmt),
				       gimple_cond_rhs (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (stmt);

    case GIMPLE_SWITCH:
      return verify_gimple_switch (stmt);

    case GIMPLE_RETURN:
      return verify_gimple_return (stmt);

    case GIMPLE_ASM:
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (stmt);

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
4318
/* Verify the contents of a GIMPLE_PHI.  Checks the result and every
   argument; virtual PHIs must use the single virtual operand decl on
   both sides.  Returns true if there is a problem, and false
   otherwise.  */

static bool
verify_gimple_phi (gimple phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid PHI result");
      return true;
    }

  /* The result must be an SSA name; a virtual result must be based on
     the function's single virtual operand.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid PHI result");
      err = true;
    }

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing PHI def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid PHI argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* Optional, more expensive check: argument types must be
	 trivially convertible to the result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in PHI argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
4382
/* Verify the GIMPLE statements inside the sequence STMTS, recursing
   into the bodies of statements that carry nested sequences.  Returns
   true if any statement is invalid (each offender is also dumped).  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	/* Statements with nested sequences: recurse into each body.  */
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
	  break;

	case GIMPLE_TRY:
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (stmt);
	  break;

	default:
	  {
	    /* Leaf statement: verify it and dump it on failure.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
4435
4436 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4437 is a problem, otherwise false. */
4438
4439 static bool
4440 verify_gimple_transaction (gimple stmt)
4441 {
4442 tree lab = gimple_transaction_label (stmt);
4443 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4444 return true;
4445 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4446 }
4447
4448
4449 /* Verify the GIMPLE statements inside the statement list STMTS. */
4450
4451 DEBUG_FUNCTION void
4452 verify_gimple_in_seq (gimple_seq stmts)
4453 {
4454 timevar_push (TV_TREE_STMT_VERIFY);
4455 if (verify_gimple_in_seq_2 (stmts))
4456 internal_error ("verify_gimple failed");
4457 timevar_pop (TV_TREE_STMT_VERIFY);
4458 }
4459
4460 /* Return true when the T can be shared. */
4461
4462 static bool
4463 tree_node_can_be_shared (tree t)
4464 {
4465 if (IS_TYPE_OR_DECL_P (t)
4466 || is_gimple_min_invariant (t)
4467 || TREE_CODE (t) == SSA_NAME
4468 || t == error_mark_node
4469 || TREE_CODE (t) == IDENTIFIER_NODE)
4470 return true;
4471
4472 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4473 return true;
4474
4475 if (DECL_P (t))
4476 return true;
4477
4478 return false;
4479 }
4480
4481 /* Called via walk_tree. Verify tree sharing. */
4482
4483 static tree
4484 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4485 {
4486 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4487
4488 if (tree_node_can_be_shared (*tp))
4489 {
4490 *walk_subtrees = false;
4491 return NULL;
4492 }
4493
4494 if (pointer_set_insert (visited, *tp))
4495 return *tp;
4496
4497 return NULL;
4498 }
4499
4500 /* Called via walk_gimple_stmt. Verify tree sharing. */
4501
4502 static tree
4503 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4504 {
4505 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4506 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4507 }
4508
/* Set to true by verify_eh_throw_stmt_node when a statement recorded
   in the EH throw table was not seen while walking the IL.  */
static bool eh_error_found;

/* Callback for htab_traverse over the EH throw statement table.
   *SLOT is a struct throw_stmt_node; DATA is the pointer set of all
   statements visited while verifying the CFG.  Diagnose entries
   whose statement is no longer present in the IL (a dead statement
   still recorded in the EH table).  Returns 1 to continue the
   traversal.  */
static int
verify_eh_throw_stmt_node (void **slot, void *data)
{
  struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
  struct pointer_set_t *visited = (struct pointer_set_t *) data;

  if (!pointer_set_contains (visited, node->stmt))
    {
      error ("dead STMT in EH table");
      debug_gimple_stmt (node->stmt);
      eh_error_found = true;
    }
  return 1;
}
4524
4525 /* Verify if the location LOCs block is in BLOCKS. */
4526
4527 static bool
4528 verify_location (pointer_set_t *blocks, location_t loc)
4529 {
4530 tree block = LOCATION_BLOCK (loc);
4531 if (block != NULL_TREE
4532 && !pointer_set_contains (blocks, block))
4533 {
4534 error ("location references block not in block tree");
4535 return true;
4536 }
4537 if (block != NULL_TREE)
4538 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4539 return false;
4540 }
4541
4542 /* Called via walk_tree. Verify that expressions have no blocks. */
4543
4544 static tree
4545 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4546 {
4547 if (!EXPR_P (*tp))
4548 {
4549 *walk_subtrees = false;
4550 return NULL;
4551 }
4552
4553 location_t loc = EXPR_LOCATION (*tp);
4554 if (LOCATION_BLOCK (loc) != NULL)
4555 return *tp;
4556
4557 return NULL;
4558 }
4559
/* Called via walk_tree.  Verify locations of expressions.  DATA is
   the pointer set of scope blocks belonging to the function's block
   tree.  Returns the offending tree on failure, NULL otherwise.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct pointer_set_t *blocks = (struct pointer_set_t *) data;

  /* Debug expressions attached to a variable must not reference any
     block.  */
  if (TREE_CODE (*tp) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (*tp))
    {
      tree t = DECL_DEBUG_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  /* Likewise for value expressions of variables, parameters and
     result decls.  */
  if ((TREE_CODE (*tp) == VAR_DECL
       || TREE_CODE (*tp) == PARM_DECL
       || TREE_CODE (*tp) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (*tp))
    {
      tree t = DECL_VALUE_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  if (!EXPR_P (*tp))
    {
      /* Only expressions carry locations; do not recurse further.  */
      *walk_subtrees = false;
      return NULL;
    }

  /* The expression's location block must be part of the block tree.  */
  location_t loc = EXPR_LOCATION (*tp);
  if (verify_location (blocks, loc))
    return *tp;

  return NULL;
}
4598
4599 /* Called via walk_gimple_op. Verify locations of expressions. */
4600
4601 static tree
4602 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4603 {
4604 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4605 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4606 }
4607
4608 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4609
4610 static void
4611 collect_subblocks (pointer_set_t *blocks, tree block)
4612 {
4613 tree t;
4614 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4615 {
4616 pointer_set_insert (blocks, t);
4617 collect_subblocks (blocks, t);
4618 }
4619 }
4620
/* Verify the GIMPLE statements in the CFG of FN.  Aborts via
   internal_error on any failure.  Per statement this checks the bb
   back-pointer, statement well-formedness, location blocks, tree
   node sharing, and finally the consistency of the EH throw table
   with the statements actually present.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn)
{
  basic_block bb;
  bool err = false;
  /* VISITED collects tree nodes for the sharing check,
     VISITED_STMTS all statements seen (to cross-check the EH table),
     BLOCKS the scope blocks reachable from DECL_INITIAL.  */
  struct pointer_set_t *visited, *visited_stmts, *blocks;

  timevar_push (TV_TREE_STMT_VERIFY);
  visited = pointer_set_create ();
  visited_stmts = pointer_set_create ();

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  blocks = pointer_set_create ();
  if (DECL_INITIAL (fn->decl))
    {
      pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
      collect_subblocks (blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      /* First verify the PHI nodes of the block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);
	  bool err2 = false;
	  unsigned i;

	  pointer_set_insert (visited_stmts, phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      /* Virtual PHIs must not carry argument locations.  */
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then verify the ordinary statements of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  pointer_set_insert (visited_stmts, stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  error ("statement marked for throw, but doesn%'t");
		  err2 |= true;
		}
	      /* An internally-throwing statement must end its block.  */
	      else if (lp_nr > 0
		       && !gsi_one_before_end_p (gsi)
		       && stmt_can_throw_internal (stmt))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  /* Finally check that every statement recorded in the EH throw
     table was actually seen above.  */
  eh_error_found = false;
  if (get_eh_throw_stmt_table (cfun))
    htab_traverse (get_eh_throw_stmt_table (cfun),
		   verify_eh_throw_stmt_node,
		   visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  pointer_set_destroy (visited);
  pointer_set_destroy (visited_stmts);
  pointer_set_destroy (blocks);
  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
4797
4798
/* Verifies that the flow information is OK: no IL on the entry/exit
   blocks, no fallthru into the exit block, labels only at the head
   of blocks, no control flow in the middle of a block, and outgoing
   edge flags consistent with each block's last statement.  Returns
   nonzero if an error was found.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple stmt;
  edge e;
  edge_iterator ei;

  /* The entry and exit blocks never contain statements or PHIs.  */
  if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB (bb)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (stmt);
	  /* Nonlocal labels must be the first label of their block.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* Likewise for EH landing pad labels.  */
	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label-to-block map must agree with the label's actual
	     position.  */
	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels past the leading run diagnosed above are stray.  */
	  if (gimple_code (stmt) == GIMPLE_LABEL)
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      /* A control statement may not be followed by a fallthru edge.  */
      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Check the edge invariants imposed by the kind of statement
	 that ends the block.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    /* A condition needs exactly one true and one false edge,
	       neither of them fallthru or abnormal.  */
	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls are treated like returns.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR)
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (stmt);

	    /* Mark all the destination basic blocks.  (Aux marker 1 =
	       named by some case label.)  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every successor must have been marked above; remark the
	       ones actually reached by an edge with 2.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Clear the aux markers again.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (stmt);
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5107
5108
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  DUMMY (= FALLTHRU->src) holds the PHI nodes
   that now logically belong to BB (= FALLTHRU->dest); mirror each of
   them in BB and wire the original result through the fallthru
   edge.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gimple_stmt_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* With a single predecessor BB needs no PHI nodes; nothing to do.  */
  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi, new_phi;

      phi = gsi_stmt (gsi);
      var = gimple_phi_result (phi);
      /* The new PHI in BB takes over the old result; the PHI left in
	 DUMMY gets a fresh SSA name, which feeds the new PHI over the
	 fallthru edge.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
5150
5151
/* Return a non-special label in the head of basic block BLOCK.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  gimple stmt;

  /* Scan the leading labels for one that is not DECL_NONLOCAL.  */
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  /* Move the chosen label to the front so later lookups find
	     it immediately.  */
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No usable label exists; create an artificial one at the start of
     the block.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
5182
5183
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  The boolean index
	 (EDGE_SUCC (src, 0) == e) is 1 when edge 0 is E and 0
	 otherwise, i.e. it selects the "other" edge.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only a GIMPLE_COND or GIMPLE_SWITCH terminator can be removed and
     replaced by a plain fallthru edge.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5221
5222
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Dispatches on the kind
   of statement ending E->src to update labels/cases before the CFG
   edge itself is redirected.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  /* Redirecting to the current destination is reported as failure so
     callers know nothing changed.  */
  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR)
    {
      /* Try the cheap route first: drop the jump entirely.  */
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's chain at the new label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, stmt);

		/* Splice E's chain onto the front of E2's chain.  */
		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* Slow path: rewrite every case label pointing at the old
	       destination.  */
	    size_t i, n = gimple_switch_num_labels (stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	int i, n = gimple_asm_nlabels (stmt);
	tree label = NULL;

	/* Retarget every asm goto label that pointed at E->dest.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* Drop the return statement; the edge becomes plain fallthru.  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (stmt, e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (stmt, gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
5370
5371 /* Returns true if it is possible to remove edge E by redirecting
5372 it to the destination of the other edge from E->src. */
5373
5374 static bool
5375 gimple_can_remove_branch_p (const_edge e)
5376 {
5377 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5378 return false;
5379
5380 return true;
5381 }
5382
5383 /* Simple wrapper, as we can always redirect fallthru edges. */
5384
5385 static basic_block
5386 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5387 {
5388 e = gimple_redirect_edge_and_branch (e, dest);
5389 gcc_assert (e);
5390
5391 return NULL;
5392 }
5393
5394
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the new basic block holding everything after the split
   point; BB keeps everything before it.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple act;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Splitting at a label is treated as splitting just after the
     labels.  */
  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
    stmt = NULL;

  /* Move everything from GSI to the new basic block.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      act = gsi_stmt (gsi);
      /* Labels always stay in BB.  */
      if (gimple_code (act) == GIMPLE_LABEL)
	continue;

      /* STMT == NULL: split right after the labels.  */
      if (!stmt)
	break;

      /* Found STMT: the split point is just after it.  */
      if (stmt == act)
	{
	  gsi_next (&gsi);
	  break;
	}
    }

  /* Nothing to move; the new block stays empty.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
5452
5453
5454 /* Moves basic block BB after block AFTER. */
5455
5456 static bool
5457 gimple_move_block_after (basic_block bb, basic_block after)
5458 {
5459 if (bb->prev_bb == after)
5460 return true;
5461
5462 unlink_block (bb);
5463 link_block (bb, after);
5464
5465 return true;
5466 }
5467
5468
5469 /* Return TRUE if block BB has no executable statements, otherwise return
5470 FALSE. */
5471
5472 static bool
5473 gimple_empty_block_p (basic_block bb)
5474 {
5475 /* BB must have no executable statements. */
5476 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5477 if (phi_nodes (bb))
5478 return false;
5479 if (gsi_end_p (gsi))
5480 return true;
5481 if (is_gimple_debug (gsi_stmt (gsi)))
5482 gsi_next_nondebug (&gsi);
5483 return gsi_end_p (gsi);
5484 }
5485
5486
5487 /* Split a basic block if it ends with a conditional branch and if the
5488 other part of the block is not empty. */
5489
5490 static basic_block
5491 gimple_split_block_before_cond_jump (basic_block bb)
5492 {
5493 gimple last, split_point;
5494 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5495 if (gsi_end_p (gsi))
5496 return NULL;
5497 last = gsi_stmt (gsi);
5498 if (gimple_code (last) != GIMPLE_COND
5499 && gimple_code (last) != GIMPLE_SWITCH)
5500 return NULL;
5501 gsi_prev_nondebug (&gsi);
5502 split_point = gsi_stmt (gsi);
5503 return split_block (bb, split_point)->dest;
5504 }
5505
5506
/* Return true if basic_block can be duplicated.  For GIMPLE this CFG
   hook unconditionally allows duplication of any block.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
5514
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The caller is responsible for wiring up the
   incoming edges and PHI arguments of the copy (see
   add_phi_args_after_copy).  Returns the new block.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi, gsi_tgt;
  gimple_seq phis = phi_nodes (bb);
  gimple phi, stmt, copy;

  /* Place the copy just before the exit block.  */
  new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Give the copy a fresh SSA name derived from the original
	 PHI result.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;

      stmt = gsi_stmt (gsi);
      /* Labels are not duplicated; a label decl maps to exactly one
	 block.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
5590
/* Adds phi node arguments for edge E_COPY after basic block duplication.
   Finds the original edge E corresponding to E_COPY (mapping copied
   blocks back through get_bb_original) and copies each PHI argument
   and its location from E to E_COPY.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gimple phi, phi_copy;
  tree def;
  gimple_stmt_iterator psi, psi_copy;

  /* No PHIs in the destination means no arguments to add.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the source and destination blocks back to their originals
     when they are copies (BB_DUPLICATED set).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the PHIs of both destinations in lockstep, pairing each
     original PHI with its copy, and transfer the argument on E to
     E_COPY.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = gsi_stmt (psi);
      phi_copy = gsi_stmt (psi_copy);
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
5641
5642
5643 /* Basic block BB_COPY was created by code duplication. Add phi node
5644 arguments for edges going out of BB_COPY. The blocks that were
5645 duplicated have BB_DUPLICATED set. */
5646
5647 void
5648 add_phi_args_after_copy_bb (basic_block bb_copy)
5649 {
5650 edge e_copy;
5651 edge_iterator ei;
5652
5653 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5654 {
5655 add_phi_args_after_copy_edge (e_copy);
5656 }
5657 }
5658
5659 /* Blocks in REGION_COPY array of length N_REGION were created by
5660 duplication of basic blocks. Add phi node arguments for edges
5661 going from these blocks. If E_COPY is not NULL, also add
5662 phi node arguments for its destination.*/
5663
5664 void
5665 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5666 edge e_copy)
5667 {
5668 unsigned i;
5669
5670 for (i = 0; i < n_region; i++)
5671 region_copy[i]->flags |= BB_DUPLICATED;
5672
5673 for (i = 0; i < n_region; i++)
5674 add_phi_args_after_copy_bb (region_copy[i]);
5675 if (e_copy)
5676 add_phi_args_after_copy_edge (e_copy);
5677
5678 for (i = 0; i < n_region; i++)
5679 region_copy[i]->flags &= ~BB_DUPLICATED;
5680 }
5681
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may appear in REGION only as ENTRY->dest;
	 anywhere else it would mean the region spans a backedge.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  set_loop_copy (loop, loop);

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;
      set_loop_copy (loop, loop_outer (loop));

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  /* If the caller did not supply a scratch array for the copies,
     allocate one and remember to free it on exit.  */
  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  initialize_original_copy_tables ();

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  /* Split the profile between the original region (which keeps the
     non-ENTRY share) and the copy (which receives the ENTRY share).  */
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
5833
5834 /* Checks if BB is part of the region defined by N_REGION BBS. */
5835 static bool
5836 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
5837 {
5838 unsigned int n;
5839
5840 for (n = 0; n < n_region; n++)
5841 {
5842 if (bb == bbs[n])
5843 return true;
5844 }
5845 return false;
5846 }
5847
5848 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
5849 are stored to REGION_COPY in the same order in that they appear
5850 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
5851 the region, EXIT an exit from it. The condition guarding EXIT
5852 is moved to ENTRY. Returns true if duplication succeeds, false
5853 otherwise.
5854
5855 For example,
5856
5857 some_code;
5858 if (cond)
5859 A;
5860 else
5861 B;
5862
5863 is transformed to
5864
5865 if (cond)
5866 {
5867 some_code;
5868 A;
5869 }
5870 else
5871 {
5872 some_code;
5873 B;
5874 }
5875 */
5876
5877 bool
5878 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
5879 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
5880 basic_block *region_copy ATTRIBUTE_UNUSED)
5881 {
5882 unsigned i;
5883 bool free_region_copy = false;
5884 struct loop *loop = exit->dest->loop_father;
5885 struct loop *orig_loop = entry->dest->loop_father;
5886 basic_block switch_bb, entry_bb, nentry_bb;
5887 vec<basic_block> doms;
5888 int total_freq = 0, exit_freq = 0;
5889 gcov_type total_count = 0, exit_count = 0;
5890 edge exits[2], nexits[2], e;
5891 gimple_stmt_iterator gsi;
5892 gimple cond_stmt;
5893 edge sorig, snew;
5894 basic_block exit_bb;
5895 gimple_stmt_iterator psi;
5896 gimple phi;
5897 tree def;
5898 struct loop *target, *aloop, *cloop;
5899
5900 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
5901 exits[0] = exit;
5902 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
5903
5904 if (!can_copy_bbs_p (region, n_region))
5905 return false;
5906
5907 initialize_original_copy_tables ();
5908 set_loop_copy (orig_loop, loop);
5909
5910 target= loop;
5911 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
5912 {
5913 if (bb_part_of_region_p (aloop->header, region, n_region))
5914 {
5915 cloop = duplicate_loop (aloop, target);
5916 duplicate_subloops (aloop, cloop);
5917 }
5918 }
5919
5920 if (!region_copy)
5921 {
5922 region_copy = XNEWVEC (basic_block, n_region);
5923 free_region_copy = true;
5924 }
5925
5926 gcc_assert (!need_ssa_update_p (cfun));
5927
5928 /* Record blocks outside the region that are dominated by something
5929 inside. */
5930 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5931
5932 if (exit->src->count)
5933 {
5934 total_count = exit->src->count;
5935 exit_count = exit->count;
5936 /* Fix up corner cases, to avoid division by zero or creation of negative
5937 frequencies. */
5938 if (exit_count > total_count)
5939 exit_count = total_count;
5940 }
5941 else
5942 {
5943 total_freq = exit->src->frequency;
5944 exit_freq = EDGE_FREQUENCY (exit);
5945 /* Fix up corner cases, to avoid division by zero or creation of negative
5946 frequencies. */
5947 if (total_freq == 0)
5948 total_freq = 1;
5949 if (exit_freq > total_freq)
5950 exit_freq = total_freq;
5951 }
5952
5953 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
5954 split_edge_bb_loc (exit), true);
5955 if (total_count)
5956 {
5957 scale_bbs_frequencies_gcov_type (region, n_region,
5958 total_count - exit_count,
5959 total_count);
5960 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
5961 total_count);
5962 }
5963 else
5964 {
5965 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
5966 total_freq);
5967 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
5968 }
5969
5970 /* Create the switch block, and put the exit condition to it. */
5971 entry_bb = entry->dest;
5972 nentry_bb = get_bb_copy (entry_bb);
5973 if (!last_stmt (entry->src)
5974 || !stmt_ends_bb_p (last_stmt (entry->src)))
5975 switch_bb = entry->src;
5976 else
5977 switch_bb = split_edge (entry);
5978 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
5979
5980 gsi = gsi_last_bb (switch_bb);
5981 cond_stmt = last_stmt (exit->src);
5982 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
5983 cond_stmt = gimple_copy (cond_stmt);
5984
5985 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
5986
5987 sorig = single_succ_edge (switch_bb);
5988 sorig->flags = exits[1]->flags;
5989 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
5990
5991 /* Register the new edge from SWITCH_BB in loop exit lists. */
5992 rescan_loop_exit (snew, true, false);
5993
5994 /* Add the PHI node arguments. */
5995 add_phi_args_after_copy (region_copy, n_region, snew);
5996
5997 /* Get rid of now superfluous conditions and associated edges (and phi node
5998 arguments). */
5999 exit_bb = exit->dest;
6000
6001 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6002 PENDING_STMT (e) = NULL;
6003
6004 /* The latch of ORIG_LOOP was copied, and so was the backedge
6005 to the original header. We redirect this backedge to EXIT_BB. */
6006 for (i = 0; i < n_region; i++)
6007 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6008 {
6009 gcc_assert (single_succ_edge (region_copy[i]));
6010 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6011 PENDING_STMT (e) = NULL;
6012 for (psi = gsi_start_phis (exit_bb);
6013 !gsi_end_p (psi);
6014 gsi_next (&psi))
6015 {
6016 phi = gsi_stmt (psi);
6017 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6018 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6019 }
6020 }
6021 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6022 PENDING_STMT (e) = NULL;
6023
6024 /* Anything that is outside of the region, but was dominated by something
6025 inside needs to update dominance info. */
6026 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6027 doms.release ();
6028 /* Update the SSA web. */
6029 update_ssa (TODO_update_ssa);
6030
6031 if (free_region_copy)
6032 free (region_copy);
6033
6034 free_original_copy_tables ();
6035 return true;
6036 }
6037
6038 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6039 adding blocks when the dominator traversal reaches EXIT. This
6040 function silently assumes that ENTRY strictly dominates EXIT. */
6041
6042 void
6043 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6044 vec<basic_block> *bbs_p)
6045 {
6046 basic_block son;
6047
6048 for (son = first_dom_son (CDI_DOMINATORS, entry);
6049 son;
6050 son = next_dom_son (CDI_DOMINATORS, son))
6051 {
6052 bbs_p->safe_push (son);
6053 if (son != exit)
6054 gather_blocks_in_sese_region (son, exit, bbs_p);
6055 }
6056 }
6057
6058 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6059 The duplicates are recorded in VARS_MAP. */
6060
6061 static void
6062 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6063 tree to_context)
6064 {
6065 tree t = *tp, new_t;
6066 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6067 void **loc;
6068
6069 if (DECL_CONTEXT (t) == to_context)
6070 return;
6071
6072 loc = pointer_map_contains (vars_map, t);
6073
6074 if (!loc)
6075 {
6076 loc = pointer_map_insert (vars_map, t);
6077
6078 if (SSA_VAR_P (t))
6079 {
6080 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6081 add_local_decl (f, new_t);
6082 }
6083 else
6084 {
6085 gcc_assert (TREE_CODE (t) == CONST_DECL);
6086 new_t = copy_node (t);
6087 }
6088 DECL_CONTEXT (new_t) = to_context;
6089
6090 *loc = new_t;
6091 }
6092 else
6093 new_t = (tree) *loc;
6094
6095 *tp = new_t;
6096 }
6097
6098
6099 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6100 VARS_MAP maps old ssa names and var_decls to the new ones. */
6101
6102 static tree
6103 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6104 tree to_context)
6105 {
6106 void **loc;
6107 tree new_name;
6108
6109 gcc_assert (!virtual_operand_p (name));
6110
6111 loc = pointer_map_contains (vars_map, name);
6112
6113 if (!loc)
6114 {
6115 tree decl = SSA_NAME_VAR (name);
6116 if (decl)
6117 {
6118 replace_by_duplicate_decl (&decl, vars_map, to_context);
6119 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6120 decl, SSA_NAME_DEF_STMT (name));
6121 if (SSA_NAME_IS_DEFAULT_DEF (name))
6122 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6123 decl, new_name);
6124 }
6125 else
6126 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6127 name, SSA_NAME_DEF_STMT (name));
6128
6129 loc = pointer_map_insert (vars_map, name);
6130 *loc = new_name;
6131 }
6132 else
6133 new_name = (tree) *loc;
6134
6135 return new_name;
6136 }
6137
/* State bundle threaded (via walk_stmt_info.info) through move_stmt_r
   and move_stmt_op while moving statements between functions.  */

struct move_stmt_d
{
  tree orig_block;		/* BLOCK being moved out of; NULL_TREE means
				   "any non-NULL block" (see move_stmt_op).  */
  tree new_block;		/* BLOCK the statements are retargeted to.  */
  tree from_context;		/* Source function's FUNCTION_DECL.  */
  tree to_context;		/* Destination function's FUNCTION_DECL.  */
  struct pointer_map_t *vars_map;  /* Maps old decls/SSA names to their
				      duplicates in TO_CONTEXT.  */
  htab_t new_label_map;		/* tree_map table: old labels to new.  */
  struct pointer_map_t *eh_map;	/* Maps old EH regions to duplicates.  */
  bool remap_decls_p;		/* Whether local decls should be remapped;
				   cleared inside OMP directives.  */
};
6149
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      /* ORIG_BLOCK == NULL_TREE means "retarget every expression that
	 has any block"; otherwise only expressions whose block is
	 exactly ORIG_BLOCK are retargeted to NEW_BLOCK.  */
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  /* Any other block must be a subblock of ORIG_BLOCK; verify by
	     walking up the BLOCK_SUPERCONTEXT chain.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  /* Labels that were duplicated for the new function are looked
	     up in NEW_LABEL_MAP; others merely change context.  */
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      /* Decls and SSA names have no subtrees that need remapping.  */
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6215
6216 /* Helper for move_stmt_r. Given an EH region number for the source
6217 function, map that to the duplicate EH regio number in the dest. */
6218
6219 static int
6220 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6221 {
6222 eh_region old_r, new_r;
6223 void **slot;
6224
6225 old_r = get_eh_region_from_number (old_nr);
6226 slot = pointer_map_contains (p->eh_map, old_r);
6227 new_r = (eh_region) *slot;
6228
6229 return new_r->index;
6230 }
6231
6232 /* Similar, but operate on INTEGER_CSTs. */
6233
6234 static tree
6235 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6236 {
6237 int old_nr, new_nr;
6238
6239 old_nr = tree_low_cst (old_t_nr, 0);
6240 new_nr = move_stmt_eh_region_nr (old_nr, p);
6241
6242 return build_int_cst (integer_type_node, new_nr);
6243 }
6244
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  /* Retarget the statement's block, using the same NULL_TREE
     convention as move_stmt_op.  */
  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      /* This builtin carries a region number in argument 1 as
		 well; remap it, then fall through for argument 0.  */
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	int r = gimple_resx_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	int r = gimple_eh_dispatch_region (stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
6333
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* Loop->aux holds the destination loop; it was set up by the
	 caller (move_sese_region_to_fn).  */
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      /* Grow by 25% plus slack, mirroring the CFG growth policy.  */
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
    {
      gimple phi = gsi_stmt (si);
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&si, true);
	  continue;
	}

      /* Rewrite the result and every SSA-name argument into names of
	 the destination function.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Retarget the block component of each argument location.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&si);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      /* Remap blocks, decls and SSA names within the statement.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  /* Transfer the label_to_block mapping from CFUN to DEST_CFUN,
	     growing the destination map if needed.  */
	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move the statement's EH and histogram information across.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  /* Retarget the block component of goto locations on outgoing edges.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
6499
6500 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6501 the outermost EH region. Use REGION as the incoming base EH region. */
6502
6503 static eh_region
6504 find_outermost_region_in_block (struct function *src_cfun,
6505 basic_block bb, eh_region region)
6506 {
6507 gimple_stmt_iterator si;
6508
6509 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6510 {
6511 gimple stmt = gsi_stmt (si);
6512 eh_region stmt_region;
6513 int lp_nr;
6514
6515 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6516 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6517 if (stmt_region)
6518 {
6519 if (region == NULL)
6520 region = stmt_region;
6521 else if (stmt_region != region)
6522 {
6523 region = eh_region_outermost (src_cfun, stmt_region, region);
6524 gcc_assert (region != NULL);
6525 }
6526 }
6527 }
6528
6529 return region;
6530 }
6531
6532 static tree
6533 new_label_mapper (tree decl, void *data)
6534 {
6535 htab_t hash = (htab_t) data;
6536 struct tree_map *m;
6537 void **slot;
6538
6539 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6540
6541 m = XNEW (struct tree_map);
6542 m->hash = DECL_UID (decl);
6543 m->base.from = decl;
6544 m->to = create_artificial_label (UNKNOWN_LOCATION);
6545 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6546 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6547 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6548
6549 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6550 gcc_assert (*slot == NULL);
6551
6552 *slot = m;
6553
6554 return m->to;
6555 }
6556
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
				  tree to_context)
{
  tree *tp, t;

  /* Walk the BLOCK_VARS chain via pointer-to-pointer so a replaced
     decl can be spliced into the chain in place.  */
  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      /* Carry the DECL_VALUE_EXPR over to the duplicate.  */
	      SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of *TP.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into subblocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
6587
6588 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6589 from FN1 to FN2. */
6590
6591 static void
6592 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6593 struct loop *loop)
6594 {
6595 /* Discard it from the old loop array. */
6596 (*get_loops (fn1))[loop->num] = NULL;
6597
6598 /* Place it in the new loop array, assigning it a new number. */
6599 loop->num = number_of_loops (fn2);
6600 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6601
6602 /* Recurse to children. */
6603 for (loop = loop->inner; loop; loop = loop->next)
6604 fixup_loop_arrays_after_move (fn1, fn2, loop);
6605 }
6606
6607 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6608 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6609 single basic block in the original CFG and the new basic block is
6610 returned. DEST_CFUN must not have a CFG yet.
6611
6612 Note that the region need not be a pure SESE region. Blocks inside
6613 the region may contain calls to abort/exit. The only restriction
6614 is that ENTRY_BB should be the only entry point and it must
6615 dominate EXIT_BB.
6616
   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.
6620
6621 All local variables referenced in the region are assumed to be in
6622 the corresponding BLOCK_VARS and unexpanded variable lists
6623 associated with DEST_CFUN. */
6624
6625 basic_block
6626 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6627 basic_block exit_bb, tree orig_block)
6628 {
6629 vec<basic_block> bbs, dom_bbs;
6630 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6631 basic_block after, bb, *entry_pred, *exit_succ, abb;
6632 struct function *saved_cfun = cfun;
6633 int *entry_flag, *exit_flag;
6634 unsigned *entry_prob, *exit_prob;
6635 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6636 edge e;
6637 edge_iterator ei;
6638 htab_t new_label_map;
6639 struct pointer_map_t *vars_map, *eh_map;
6640 struct loop *loop = entry_bb->loop_father;
6641 struct loop *loop0 = get_loop (saved_cfun, 0);
6642 struct move_stmt_d d;
6643
6644 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6645 region. */
6646 gcc_assert (entry_bb != exit_bb
6647 && (!exit_bb
6648 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6649
6650 /* Collect all the blocks in the region. Manually add ENTRY_BB
6651 because it won't be added by dfs_enumerate_from. */
6652 bbs.create (0);
6653 bbs.safe_push (entry_bb);
6654 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6655
6656 /* The blocks that used to be dominated by something in BBS will now be
6657 dominated by the new block. */
6658 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6659 bbs.address (),
6660 bbs.length ());
6661
6662 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6663 the predecessor edges to ENTRY_BB and the successor edges to
6664 EXIT_BB so that we can re-attach them to the new basic block that
6665 will replace the region. */
6666 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6667 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6668 entry_flag = XNEWVEC (int, num_entry_edges);
6669 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6670 i = 0;
6671 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6672 {
6673 entry_prob[i] = e->probability;
6674 entry_flag[i] = e->flags;
6675 entry_pred[i++] = e->src;
6676 remove_edge (e);
6677 }
6678
6679 if (exit_bb)
6680 {
6681 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6682 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6683 exit_flag = XNEWVEC (int, num_exit_edges);
6684 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6685 i = 0;
6686 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6687 {
6688 exit_prob[i] = e->probability;
6689 exit_flag[i] = e->flags;
6690 exit_succ[i++] = e->dest;
6691 remove_edge (e);
6692 }
6693 }
6694 else
6695 {
6696 num_exit_edges = 0;
6697 exit_succ = NULL;
6698 exit_flag = NULL;
6699 exit_prob = NULL;
6700 }
6701
6702 /* Switch context to the child function to initialize DEST_FN's CFG. */
6703 gcc_assert (dest_cfun->cfg == NULL);
6704 push_cfun (dest_cfun);
6705
6706 init_empty_tree_cfg ();
6707
6708 /* Initialize EH information for the new function. */
6709 eh_map = NULL;
6710 new_label_map = NULL;
6711 if (saved_cfun->eh)
6712 {
6713 eh_region region = NULL;
6714
6715 FOR_EACH_VEC_ELT (bbs, i, bb)
6716 region = find_outermost_region_in_block (saved_cfun, bb, region);
6717
6718 init_eh_for_function ();
6719 if (region != NULL)
6720 {
6721 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6722 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6723 new_label_mapper, new_label_map);
6724 }
6725 }
6726
6727 /* Initialize an empty loop tree. */
6728 struct loops *loops = ggc_alloc_cleared_loops ();
6729 init_loops_structure (dest_cfun, loops, 1);
6730 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6731 set_loops_for_fn (dest_cfun, loops);
6732
6733 /* Move the outlined loop tree part. */
6734 num_nodes = bbs.length ();
6735 FOR_EACH_VEC_ELT (bbs, i, bb)
6736 {
6737 if (bb->loop_father->header == bb)
6738 {
6739 struct loop *this_loop = bb->loop_father;
6740 struct loop *outer = loop_outer (this_loop);
6741 if (outer == loop
6742 /* If the SESE region contains some bbs ending with
6743 a noreturn call, those are considered to belong
6744 to the outermost loop in saved_cfun, rather than
6745 the entry_bb's loop_father. */
6746 || outer == loop0)
6747 {
6748 if (outer != loop)
6749 num_nodes -= this_loop->num_nodes;
6750 flow_loop_tree_node_remove (bb->loop_father);
6751 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6752 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6753 }
6754 }
6755 else if (bb->loop_father == loop0 && loop0 != loop)
6756 num_nodes--;
6757
6758 /* Remove loop exits from the outlined region. */
6759 if (loops_for_fn (saved_cfun)->exits)
6760 FOR_EACH_EDGE (e, ei, bb->succs)
6761 {
6762 void **slot = htab_find_slot_with_hash
6763 (loops_for_fn (saved_cfun)->exits, e,
6764 htab_hash_pointer (e), NO_INSERT);
6765 if (slot)
6766 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
6767 }
6768 }
6769
6770
6771 /* Adjust the number of blocks in the tree root of the outlined part. */
6772 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
6773
6774 /* Setup a mapping to be used by move_block_to_fn. */
6775 loop->aux = current_loops->tree_root;
6776 loop0->aux = current_loops->tree_root;
6777
6778 pop_cfun ();
6779
6780 /* Move blocks from BBS into DEST_CFUN. */
6781 gcc_assert (bbs.length () >= 2);
6782 after = dest_cfun->cfg->x_entry_block_ptr;
6783 vars_map = pointer_map_create ();
6784
6785 memset (&d, 0, sizeof (d));
6786 d.orig_block = orig_block;
6787 d.new_block = DECL_INITIAL (dest_cfun->decl);
6788 d.from_context = cfun->decl;
6789 d.to_context = dest_cfun->decl;
6790 d.vars_map = vars_map;
6791 d.new_label_map = new_label_map;
6792 d.eh_map = eh_map;
6793 d.remap_decls_p = true;
6794
6795 FOR_EACH_VEC_ELT (bbs, i, bb)
6796 {
6797 /* No need to update edge counts on the last block. It has
6798 already been updated earlier when we detached the region from
6799 the original CFG. */
6800 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
6801 after = bb;
6802 }
6803
6804 loop->aux = NULL;
6805 loop0->aux = NULL;
6806 /* Loop sizes are no longer correct, fix them up. */
6807 loop->num_nodes -= num_nodes;
6808 for (struct loop *outer = loop_outer (loop);
6809 outer; outer = loop_outer (outer))
6810 outer->num_nodes -= num_nodes;
6811 loop0->num_nodes -= bbs.length () - num_nodes;
6812
6813 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
6814 {
6815 struct loop *aloop;
6816 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
6817 if (aloop != NULL)
6818 {
6819 if (aloop->simduid)
6820 {
6821 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
6822 d.to_context);
6823 dest_cfun->has_simduid_loops = true;
6824 }
6825 if (aloop->force_vect)
6826 dest_cfun->has_force_vect_loops = true;
6827 }
6828 }
6829
6830 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
6831 if (orig_block)
6832 {
6833 tree block;
6834 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6835 == NULL_TREE);
6836 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
6837 = BLOCK_SUBBLOCKS (orig_block);
6838 for (block = BLOCK_SUBBLOCKS (orig_block);
6839 block; block = BLOCK_CHAIN (block))
6840 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
6841 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
6842 }
6843
6844 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
6845 vars_map, dest_cfun->decl);
6846
6847 if (new_label_map)
6848 htab_delete (new_label_map);
6849 if (eh_map)
6850 pointer_map_destroy (eh_map);
6851 pointer_map_destroy (vars_map);
6852
6853 /* Rewire the entry and exit blocks. The successor to the entry
6854 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
6855 the child function. Similarly, the predecessor of DEST_FN's
6856 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
6857 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
6858 various CFG manipulation function get to the right CFG.
6859
6860 FIXME, this is silly. The CFG ought to become a parameter to
6861 these helpers. */
6862 push_cfun (dest_cfun);
6863 make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
6864 if (exit_bb)
6865 make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
6866 pop_cfun ();
6867
6868 /* Back in the original function, the SESE region has disappeared,
6869 create a new basic block in its place. */
6870 bb = create_empty_bb (entry_pred[0]);
6871 if (current_loops)
6872 add_bb_to_loop (bb, loop);
6873 for (i = 0; i < num_entry_edges; i++)
6874 {
6875 e = make_edge (entry_pred[i], bb, entry_flag[i]);
6876 e->probability = entry_prob[i];
6877 }
6878
6879 for (i = 0; i < num_exit_edges; i++)
6880 {
6881 e = make_edge (bb, exit_succ[i], exit_flag[i]);
6882 e->probability = exit_prob[i];
6883 }
6884
6885 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
6886 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
6887 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
6888 dom_bbs.release ();
6889
6890 if (exit_bb)
6891 {
6892 free (exit_prob);
6893 free (exit_flag);
6894 free (exit_succ);
6895 }
6896 free (entry_prob);
6897 free (entry_flag);
6898 free (entry_pred);
6899 bbs.release ();
6900
6901 return bb;
6902 }
6903
6904
/* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
   dumpfile.h).  Temporarily sets current_function_decl to FNDECL and
   restores it before returning.  */

void
dump_function_to_file (tree fndecl, FILE *file, int flags)
{
  tree arg, var, old_current_fndecl = current_function_decl;
  struct function *dsf;
  bool ignore_topmost_bind = false, any_var = false;
  basic_block bb;
  tree chain;
  bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
		  && decl_is_tm_clone (fndecl));
  struct function *fun = DECL_STRUCT_FUNCTION (fndecl);

  current_function_decl = fndecl;

  /* Print the signature: "<name> [tm-clone] (type1 arg1, type2 arg2, ...)".  */
  fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");

  arg = DECL_ARGUMENTS (fndecl);
  while (arg)
    {
      print_generic_expr (file, TREE_TYPE (arg), dump_flags);
      fprintf (file, " ");
      print_generic_expr (file, arg, dump_flags);
      if (flags & TDF_VERBOSE)
	print_node (file, "", arg, 4);
      if (DECL_CHAIN (arg))
	fprintf (file, ", ");
      arg = DECL_CHAIN (arg);
    }
  fprintf (file, ")\n");

  if (flags & TDF_VERBOSE)
    print_node (file, "", fndecl, 2);

  dsf = DECL_STRUCT_FUNCTION (fndecl);
  if (dsf && (flags & TDF_EH))
    dump_eh_tree (file, dsf);

  /* With TDF_RAW and no GIMPLE body yet, emit a raw tree dump and
     return early (restoring current_function_decl).  */
  if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
    {
      dump_node (fndecl, TDF_SLIM | flags, file);
      current_function_decl = old_current_fndecl;
      return;
    }

  /* When GIMPLE is lowered, the variables are no longer available in
     BIND_EXPRs, so display them separately.  */
  if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
    {
      unsigned ix;
      ignore_topmost_bind = true;

      fprintf (file, "{\n");
      if (!vec_safe_is_empty (fun->local_decls))
	FOR_EACH_LOCAL_DECL (fun, ix, var)
	  {
	    print_generic_decl (file, var, flags);
	    if (flags & TDF_VERBOSE)
	      print_node (file, "", var, 4);
	    fprintf (file, "\n");

	    any_var = true;
	  }
      /* Also list anonymous SSA names (those without an underlying
	 variable), which are not in local_decls.  */
      if (gimple_in_ssa_p (cfun))
	for (ix = 1; ix < num_ssa_names; ++ix)
	  {
	    tree name = ssa_name (ix);
	    if (name && !SSA_NAME_VAR (name))
	      {
		fprintf (file, " ");
		print_generic_expr (file, TREE_TYPE (name), flags);
		fprintf (file, " ");
		print_generic_expr (file, name, flags);
		fprintf (file, ";\n");

		any_var = true;
	      }
	  }
    }

  if (fun && fun->decl == fndecl
      && fun->cfg
      && basic_block_info_for_function (fun))
    {
      /* If the CFG has been built, emit a CFG-based dump.  */
      if (!ignore_topmost_bind)
	fprintf (file, "{\n");

      if (any_var && n_basic_blocks_for_function (fun))
	fprintf (file, "\n");

      FOR_EACH_BB_FN (bb, fun)
	dump_bb (file, bb, 2, flags | TDF_COMMENT);

      fprintf (file, "}\n");
    }
  else if (DECL_SAVED_TREE (fndecl) == NULL)
    {
      /* The function is now in GIMPLE form but the CFG has not been
	 built yet.  Emit the single sequence of GIMPLE statements
	 that make up its body.  */
      gimple_seq body = gimple_body (fndecl);

      /* A body that is exactly one GIMPLE_BIND prints its own braces;
	 otherwise print braces (unless already opened above).  */
      if (gimple_seq_first_stmt (body)
	  && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
	  && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
	print_gimple_seq (file, body, 0, flags);
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");

	  if (any_var)
	    fprintf (file, "\n");

	  print_gimple_seq (file, body, 2, flags);
	  fprintf (file, "}\n");
	}
    }
  else
    {
      int indent;

      /* Make a tree based dump.  */
      chain = DECL_SAVED_TREE (fndecl);
      if (chain && TREE_CODE (chain) == BIND_EXPR)
	{
	  if (ignore_topmost_bind)
	    {
	      chain = BIND_EXPR_BODY (chain);
	      indent = 2;
	    }
	  else
	    indent = 0;
	}
      else
	{
	  if (!ignore_topmost_bind)
	    fprintf (file, "{\n");
	  indent = 2;
	}

      if (any_var)
	fprintf (file, "\n");

      print_generic_stmt_indented (file, chain, flags, indent);
      if (ignore_topmost_bind)
	fprintf (file, "}\n");
    }

  if (flags & TDF_ENUMERATE_LOCALS)
    dump_enumerated_decls (file, flags);
  fprintf (file, "\n\n");

  current_function_decl = old_current_fndecl;
}
7062
/* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h).
   Convenience wrapper around dump_function_to_file for debugger use.  */

DEBUG_FUNCTION void
debug_function (tree fn, int flags)
{
  dump_function_to_file (fn, stderr, flags);
}
7070
7071
7072 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7073
7074 static void
7075 print_pred_bbs (FILE *file, basic_block bb)
7076 {
7077 edge e;
7078 edge_iterator ei;
7079
7080 FOR_EACH_EDGE (e, ei, bb->preds)
7081 fprintf (file, "bb_%d ", e->src->index);
7082 }
7083
7084
7085 /* Print on FILE the indexes for the successors of basic_block BB. */
7086
7087 static void
7088 print_succ_bbs (FILE *file, basic_block bb)
7089 {
7090 edge e;
7091 edge_iterator ei;
7092
7093 FOR_EACH_EDGE (e, ei, bb->succs)
7094 fprintf (file, "bb_%d ", e->dest->index);
7095 }
7096
7097 /* Print to FILE the basic block BB following the VERBOSITY level. */
7098
7099 void
7100 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7101 {
7102 char *s_indent = (char *) alloca ((size_t) indent + 1);
7103 memset ((void *) s_indent, ' ', (size_t) indent);
7104 s_indent[indent] = '\0';
7105
7106 /* Print basic_block's header. */
7107 if (verbosity >= 2)
7108 {
7109 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7110 print_pred_bbs (file, bb);
7111 fprintf (file, "}, succs = {");
7112 print_succ_bbs (file, bb);
7113 fprintf (file, "})\n");
7114 }
7115
7116 /* Print basic_block's body. */
7117 if (verbosity >= 3)
7118 {
7119 fprintf (file, "%s {\n", s_indent);
7120 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7121 fprintf (file, "%s }\n", s_indent);
7122 }
7123 }
7124
7125 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7126
7127 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7128 VERBOSITY level this outputs the contents of the loop, or just its
7129 structure. */
7130
7131 static void
7132 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7133 {
7134 char *s_indent;
7135 basic_block bb;
7136
7137 if (loop == NULL)
7138 return;
7139
7140 s_indent = (char *) alloca ((size_t) indent + 1);
7141 memset ((void *) s_indent, ' ', (size_t) indent);
7142 s_indent[indent] = '\0';
7143
7144 /* Print loop's header. */
7145 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7146 if (loop->header)
7147 fprintf (file, "header = %d", loop->header->index);
7148 else
7149 {
7150 fprintf (file, "deleted)\n");
7151 return;
7152 }
7153 if (loop->latch)
7154 fprintf (file, ", latch = %d", loop->latch->index);
7155 else
7156 fprintf (file, ", multiple latches");
7157 fprintf (file, ", niter = ");
7158 print_generic_expr (file, loop->nb_iterations, 0);
7159
7160 if (loop->any_upper_bound)
7161 {
7162 fprintf (file, ", upper_bound = ");
7163 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7164 }
7165
7166 if (loop->any_estimate)
7167 {
7168 fprintf (file, ", estimate = ");
7169 dump_double_int (file, loop->nb_iterations_estimate, true);
7170 }
7171 fprintf (file, ")\n");
7172
7173 /* Print loop's body. */
7174 if (verbosity >= 1)
7175 {
7176 fprintf (file, "%s{\n", s_indent);
7177 FOR_EACH_BB (bb)
7178 if (bb->loop_father == loop)
7179 print_loops_bb (file, bb, indent, verbosity);
7180
7181 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7182 fprintf (file, "%s}\n", s_indent);
7183 }
7184 }
7185
7186 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7187 spaces. Following VERBOSITY level this outputs the contents of the
7188 loop, or just its structure. */
7189
7190 static void
7191 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7192 int verbosity)
7193 {
7194 if (loop == NULL)
7195 return;
7196
7197 print_loop (file, loop, indent, verbosity);
7198 print_loop_and_siblings (file, loop->next, indent, verbosity);
7199 }
7200
7201 /* Follow a CFG edge from the entry point of the program, and on entry
7202 of a loop, pretty print the loop structure on FILE. */
7203
7204 void
7205 print_loops (FILE *file, int verbosity)
7206 {
7207 basic_block bb;
7208
7209 bb = ENTRY_BLOCK_PTR;
7210 if (bb && bb->loop_father)
7211 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7212 }
7213
/* Dump loop REF to stderr, structure only (verbosity 0).  */

DEBUG_FUNCTION void
debug (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
7221
7222 DEBUG_FUNCTION void
7223 debug (struct loop *ptr)
7224 {
7225 if (ptr)
7226 debug (*ptr);
7227 else
7228 fprintf (stderr, "<nil>\n");
7229 }
7230
/* Dump loop REF to stderr verbosely (verbosity 3: structure plus
   basic block contents).  */

DEBUG_FUNCTION void
debug_verbose (struct loop &ref)
{
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
7238
7239 DEBUG_FUNCTION void
7240 debug_verbose (struct loop *ptr)
7241 {
7242 if (ptr)
7243 debug (*ptr);
7244 else
7245 fprintf (stderr, "<nil>\n");
7246 }
7247
7248
/* Debugging loops structure at tree level, at some VERBOSITY level.
   Prints the whole loop tree of the current function to stderr.  */

DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  print_loops (stderr, verbosity);
}
7256
/* Print on stderr the code of LOOP, at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop (struct loop *loop, int verbosity)
{
  print_loop (stderr, loop, 0, verbosity);
}
7264
/* Print on stderr the code of loop number NUM of the current function,
   at some VERBOSITY level.  */

DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  debug_loop (get_loop (cfun, num), verbosity);
}
7273
7274 /* Return true if BB ends with a call, possibly followed by some
7275 instructions that must stay with the call. Return false,
7276 otherwise. */
7277
7278 static bool
7279 gimple_block_ends_with_call_p (basic_block bb)
7280 {
7281 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7282 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7283 }
7284
7285
7286 /* Return true if BB ends with a conditional branch. Return false,
7287 otherwise. */
7288
7289 static bool
7290 gimple_block_ends_with_condjump_p (const_basic_block bb)
7291 {
7292 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7293 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7294 }
7295
7296
/* Return true if we need to add fake edge to exit at statement T.
   Helper function for gimple_flow_call_edges_add.  */

static bool
need_fake_edge_p (gimple t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A direct call to a nothrow builtin that cannot return twice needs
     no fake edge -- except for fork, see the comment below.  */
  if (is_gimple_call (t)
      && fndecl
      && DECL_BUILT_IN (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	   && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Any call that is not known noreturn may fail to return.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* Even a noreturn call needs a fake edge if it already has some
	 non-fake outgoing edge (e.g. an EH or abnormal edge).  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Volatile asms and asms in "input" (basic) form may have control
     effects the CFG does not model; presumably they could fail to fall
     through, so give them a fake edge too.  */
  if (gimple_code (t) == GIMPLE_ASM
      && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
    return true;

  return false;
}
7353
7354
/* Add fake edges to the function exit for any non constant and non
   noreturn calls (or noreturn calls with EH/abnormal edges),
   volatile inline assembly in the bitmap of blocks specified by BLOCKS
   or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does
   not imply that all subsequent instructions must be executed.  */

static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Nothing to do when the CFG holds only the fixed ENTRY/EXIT blocks.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && need_fake_edge_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* Inserting the nop on the edge splits it, so BB no
		 longer falls directly into EXIT.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      gimple_stmt_iterator gsi;
      gimple stmt, last_stmt;

      /* Block indices may be sparse after removals.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the statements of BB backwards so that splitting the
	 block below does not disturb the remaining iteration.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (need_fake_edge_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
#ifdef ENABLE_CHECKING
		  if (stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR);
		      gcc_assert (e == NULL);
		    }
#endif

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
7474
/* Removes edge E and all the blocks dominated by it, and updates dominance
   information.  The IL in E->src needs to be updated separately.
   If dominance info is not available, only the edge E is removed.  */

void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  bitmap df, df_idom;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* Without dominator information we cannot compute the dominated set;
     just remove the edge.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR)
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  df = BITMAP_ALLOC (NULL);
  df_idom = BITMAP_ALLOC (NULL);

  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      /* DF = successors of the removed blocks ...  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR)
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* ... minus the removed blocks themselves.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK (i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK (i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  BITMAP_FREE (df);
  BITMAP_FREE (df_idom);
  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
7600
7601 /* Purge dead EH edges from basic block BB. */
7602
7603 bool
7604 gimple_purge_dead_eh_edges (basic_block bb)
7605 {
7606 bool changed = false;
7607 edge e;
7608 edge_iterator ei;
7609 gimple stmt = last_stmt (bb);
7610
7611 if (stmt && stmt_can_throw_internal (stmt))
7612 return false;
7613
7614 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7615 {
7616 if (e->flags & EDGE_EH)
7617 {
7618 remove_edge_and_dominated_blocks (e);
7619 changed = true;
7620 }
7621 else
7622 ei_next (&ei);
7623 }
7624
7625 return changed;
7626 }
7627
7628 /* Purge dead EH edges from basic block listed in BLOCKS. */
7629
7630 bool
7631 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7632 {
7633 bool changed = false;
7634 unsigned i;
7635 bitmap_iterator bi;
7636
7637 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7638 {
7639 basic_block bb = BASIC_BLOCK (i);
7640
7641 /* Earlier gimple_purge_dead_eh_edges could have removed
7642 this basic block already. */
7643 gcc_assert (bb || changed);
7644 if (bb != NULL)
7645 changed |= gimple_purge_dead_eh_edges (bb);
7646 }
7647
7648 return changed;
7649 }
7650
7651 /* Purge dead abnormal call edges from basic block BB. */
7652
7653 bool
7654 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7655 {
7656 bool changed = false;
7657 edge e;
7658 edge_iterator ei;
7659 gimple stmt = last_stmt (bb);
7660
7661 if (!cfun->has_nonlocal_label
7662 && !cfun->calls_setjmp)
7663 return false;
7664
7665 if (stmt && stmt_can_make_abnormal_goto (stmt))
7666 return false;
7667
7668 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7669 {
7670 if (e->flags & EDGE_ABNORMAL)
7671 {
7672 if (e->flags & EDGE_FALLTHRU)
7673 e->flags &= ~EDGE_ABNORMAL;
7674 else
7675 remove_edge_and_dominated_blocks (e);
7676 changed = true;
7677 }
7678 else
7679 ei_next (&ei);
7680 }
7681
7682 return changed;
7683 }
7684
7685 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7686
7687 bool
7688 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7689 {
7690 bool changed = false;
7691 unsigned i;
7692 bitmap_iterator bi;
7693
7694 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7695 {
7696 basic_block bb = BASIC_BLOCK (i);
7697
7698 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7699 this basic block already. */
7700 gcc_assert (bb || changed);
7701 if (bb != NULL)
7702 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7703 }
7704
7705 return changed;
7706 }
7707
7708 /* This function is called whenever a new edge is created or
7709 redirected. */
7710
7711 static void
7712 gimple_execute_on_growing_pred (edge e)
7713 {
7714 basic_block bb = e->dest;
7715
7716 if (!gimple_seq_empty_p (phi_nodes (bb)))
7717 reserve_phi_args_for_new_edge (bb);
7718 }
7719
7720 /* This function is called immediately before edge E is removed from
7721 the edge vector E->dest->preds. */
7722
7723 static void
7724 gimple_execute_on_shrinking_pred (edge e)
7725 {
7726 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7727 remove_phi_args (e);
7728 }
7729
7730 /*---------------------------------------------------------------------------
7731 Helper functions for Loop versioning
7732 ---------------------------------------------------------------------------*/
7733
/* Adjust phi nodes for 'first' basic block.  'second' basic block is a copy
   of 'first'.  Both of them are dominated by 'new_head' basic block.  When
   'new_head' was created by 'second's incoming edge it received phi arguments
   on the edge by split_edge().  Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds phi args on this
   additional edge 'e' that new_head to second edge received as part of edge
   splitting.  */

static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gimple phi1, phi2;
  gimple_stmt_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.
     The two blocks' phi sequences are walked in lockstep, relying on
     that correspondence.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = gsi_stmt (psi1);
      phi2 = gsi_stmt (psi2);
      /* Copy SECOND's argument on E2 over as FIRST's argument for the
	 new edge E, preserving the argument's source location.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
7769
7770
7771 /* Adds a if else statement to COND_BB with condition COND_EXPR.
7772 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
7773 the destination of the ELSE part. */
7774
7775 static void
7776 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
7777 basic_block second_head ATTRIBUTE_UNUSED,
7778 basic_block cond_bb, void *cond_e)
7779 {
7780 gimple_stmt_iterator gsi;
7781 gimple new_cond_expr;
7782 tree cond_expr = (tree) cond_e;
7783 edge e0;
7784
7785 /* Build new conditional expr */
7786 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
7787 NULL_TREE, NULL_TREE);
7788
7789 /* Add new cond in cond_bb. */
7790 gsi = gsi_last_bb (cond_bb);
7791 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
7792
7793 /* Adjust edges appropriately to connect new head with first head
7794 as well as second head. */
7795 e0 = single_succ_edge (cond_bb);
7796 e0->flags &= ~EDGE_FALLTHRU;
7797 e0->flags |= EDGE_FALSE_VALUE;
7798 }
7799
7800
7801 /* Do book-keeping of basic block BB for the profile consistency checker.
7802 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
7803 then do post-pass accounting. Store the counting in RECORD. */
7804 static void
7805 gimple_account_profile_record (basic_block bb, int after_pass,
7806 struct profile_record *record)
7807 {
7808 gimple_stmt_iterator i;
7809 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
7810 {
7811 record->size[after_pass]
7812 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
7813 if (profile_status == PROFILE_READ)
7814 record->time[after_pass]
7815 += estimate_num_insns (gsi_stmt (i),
7816 &eni_time_weights) * bb->count;
7817 else if (profile_status == PROFILE_GUESSED)
7818 record->time[after_pass]
7819 += estimate_num_insns (gsi_stmt (i),
7820 &eni_time_weights) * bb->frequency;
7821 }
7822 }
7823
/* Table of GIMPLE-level implementations of the generic CFG hooks.
   NULL entries are hooks this IL does not provide.  */

struct cfg_hooks gimple_cfg_hooks = {
  "gimple",			/* name */
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,	/* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts,		/* flush_pending_stmts */
  gimple_empty_block_p,		/* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record,	/* account_profile_record */
};
7860
7861
/* Split all critical edges.  A critical edge is one whose source has
   multiple successors and whose destination has multiple predecessors;
   splitting inserts a new block on the edge so later edge insertions
   have a home.  Returns 0 (no extra TODO flags).  */

static unsigned int
split_critical_edges (void)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB (bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
	  /* Abnormal edges are never split.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if ((!single_pred_p (e->dest)
	            || !gimple_seq_empty_p (phi_nodes (e->dest))
	            || e->dest == EXIT_BLOCK_PTR)
		   && e->src != ENTRY_BLOCK_PTR
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      gsi = gsi_last_bb (e->src);
	      /* Do not split after a plain return or __builtin_return;
		 such edges lead to EXIT and need no insertion point.  */
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
7909
namespace {

/* Pass descriptor for "crited", the critical-edge-splitting pass.
   Requires a CFG and provides PROP_no_crit_edges.  */
const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

/* Pass wrapper: no gate (always runs), delegates to
   split_critical_edges.  Clonable so it can appear more than once in
   the pass pipeline.  */
class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return split_critical_edges (); }

  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges

} // anon namespace
7941
/* Create a new instance of the critical-edge-splitting pass for
   context CTXT.  Caller owns the returned pass.  */
gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
7947
7948
7949 /* Build a ternary operation and gimplify it. Emit code before GSI.
7950 Return the gimple_val holding the result. */
7951
7952 tree
7953 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
7954 tree type, tree a, tree b, tree c)
7955 {
7956 tree ret;
7957 location_t loc = gimple_location (gsi_stmt (*gsi));
7958
7959 ret = fold_build3_loc (loc, code, type, a, b, c);
7960 STRIP_NOPS (ret);
7961
7962 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7963 GSI_SAME_STMT);
7964 }
7965
7966 /* Build a binary operation and gimplify it. Emit code before GSI.
7967 Return the gimple_val holding the result. */
7968
7969 tree
7970 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
7971 tree type, tree a, tree b)
7972 {
7973 tree ret;
7974
7975 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
7976 STRIP_NOPS (ret);
7977
7978 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7979 GSI_SAME_STMT);
7980 }
7981
7982 /* Build a unary operation and gimplify it. Emit code before GSI.
7983 Return the gimple_val holding the result. */
7984
7985 tree
7986 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
7987 tree a)
7988 {
7989 tree ret;
7990
7991 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
7992 STRIP_NOPS (ret);
7993
7994 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
7995 GSI_SAME_STMT);
7996 }
7997
7998
7999 \f
/* Emit return warnings: diagnose a 'noreturn' function that can in
   fact return, and (under -Wreturn-type) a non-void function whose
   control reaches the end without returning a value.  */

static unsigned int
execute_warn_function_return (void)
{
  source_location location;
  gimple last;
  edge e;
  edge_iterator ei;

  /* The target can suppress these warnings entirely (e.g. for
     functions whose return the target handles specially).  */
  if (!targetm.warn_func_return (cfun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on a FUNCTION_DECL marks it 'noreturn'.  */
  if (TREE_THIS_VOLATILE (cfun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  /* Prefer the location of an explicit return statement over
	     the function's end locus.
	     NOTE(review): assumes every predecessor of EXIT ends in a
	     statement (last_stmt non-NULL) -- confirm.  */
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && (location = gimple_location (last)) != UNKNOWN_LOCATION)
	    break;
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type
	   && !TREE_NO_WARNING (cfun->decl)
	   && EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
	{
	  gimple last = last_stmt (e->src);
	  if (gimple_code (last) == GIMPLE_RETURN
	      && gimple_return_retval (last) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (location == UNKNOWN_LOCATION)
		location = cfun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
	      /* Warn at most once per function.  */
	      TREE_NO_WARNING (cfun->decl) = 1;
	      break;
	    }
	}
    }
  return 0;
}
8056
8057
8058 /* Given a basic block B which ends with a conditional and has
8059 precisely two successors, determine which of the edges is taken if
8060 the conditional is true and which is taken if the conditional is
8061 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8062
8063 void
8064 extract_true_false_edges_from_block (basic_block b,
8065 edge *true_edge,
8066 edge *false_edge)
8067 {
8068 edge e = EDGE_SUCC (b, 0);
8069
8070 if (e->flags & EDGE_TRUE_VALUE)
8071 {
8072 *true_edge = e;
8073 *false_edge = EDGE_SUCC (b, 1);
8074 }
8075 else
8076 {
8077 *false_edge = e;
8078 *true_edge = EDGE_SUCC (b, 1);
8079 }
8080 }
8081
namespace {

/* Pass descriptor for the return-warning pass; '*' prefix in the name
   means it is never printed in dumps.  */
const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: no gate (always runs), delegates to
   execute_warn_function_return.  */
class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return execute_warn_function_return (); }

}; // class pass_warn_function_return

} // anon namespace
8112
/* Create a new instance of the return-warning pass for context CTXT.
   Caller owns the returned pass.  */
gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}
8118
8119 /* Walk a gimplified function and warn for functions whose return value is
8120 ignored and attribute((warn_unused_result)) is set. This is done before
8121 inlining, so we don't have to worry about that. */
8122
8123 static void
8124 do_warn_unused_result (gimple_seq seq)
8125 {
8126 tree fdecl, ftype;
8127 gimple_stmt_iterator i;
8128
8129 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8130 {
8131 gimple g = gsi_stmt (i);
8132
8133 switch (gimple_code (g))
8134 {
8135 case GIMPLE_BIND:
8136 do_warn_unused_result (gimple_bind_body (g));
8137 break;
8138 case GIMPLE_TRY:
8139 do_warn_unused_result (gimple_try_eval (g));
8140 do_warn_unused_result (gimple_try_cleanup (g));
8141 break;
8142 case GIMPLE_CATCH:
8143 do_warn_unused_result (gimple_catch_handler (g));
8144 break;
8145 case GIMPLE_EH_FILTER:
8146 do_warn_unused_result (gimple_eh_filter_failure (g));
8147 break;
8148
8149 case GIMPLE_CALL:
8150 if (gimple_call_lhs (g))
8151 break;
8152 if (gimple_call_internal_p (g))
8153 break;
8154
8155 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8156 LHS. All calls whose value is ignored should be
8157 represented like this. Look for the attribute. */
8158 fdecl = gimple_call_fndecl (g);
8159 ftype = gimple_call_fntype (g);
8160
8161 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8162 {
8163 location_t loc = gimple_location (g);
8164
8165 if (fdecl)
8166 warning_at (loc, OPT_Wunused_result,
8167 "ignoring return value of %qD, "
8168 "declared with attribute warn_unused_result",
8169 fdecl);
8170 else
8171 warning_at (loc, OPT_Wunused_result,
8172 "ignoring return value of function "
8173 "declared with attribute warn_unused_result");
8174 }
8175 break;
8176
8177 default:
8178 /* Not a container, not a call, or a call whose value is used. */
8179 break;
8180 }
8181 }
8182 }
8183
/* Entry point of the warn_unused_result pass: walk the body of the
   current function.  Returns 0 (no TODO flags).  */
static unsigned int
run_warn_unused_result (void)
{
  do_warn_unused_result (gimple_body (current_function_decl));
  return 0;
}
8190
/* Gate: run the pass only when -Wunused-result is enabled.  */
static bool
gate_warn_unused_result (void)
{
  return flag_warn_unused_result;
}
8196
namespace {

/* Pass descriptor for the warn_unused_result pass; runs on any GIMPLE
   (PROP_gimple_any), before the CFG is built.  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: gated on -Wunused-result, delegates to
   run_warn_unused_result.  */
class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_unused_result (); }
  unsigned int execute () { return run_warn_unused_result (); }

}; // class pass_warn_unused_result

} // anon namespace
8228
/* Create a new instance of the warn_unused_result pass for context
   CTXT.  Caller owns the returned pass.  */
gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}
8234
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   Returns a mask of TODO_* flags for the pass manager (cleanup-cfg,
   SSA update/verify) reflecting what was changed.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
  gcov_type count_scale;
  edge e;
  edge_iterator ei;

  /* Rescale all profile counts so the body agrees with the count
     recorded on the function's cgraph node (inlining may have changed
     it).  */
  count_scale
      = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
                            ENTRY_BLOCK_PTR->count);

  ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
  EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
                                       count_scale);

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->count = apply_scale (e->count, count_scale);

  FOR_EACH_BB (bb)
    {
      bb->count = apply_scale (bb->count, count_scale);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* A call newly known const/pure cannot take its abnormal
		 edges; purge any that became dead.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* A call newly known noreturn must not fall through.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Drop EH edges made dead by the statement no longer being
	     able to throw.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	e->count = apply_scale (e->count, count_scale);

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
	    {
	      stmt = gimple_build_call
		  (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	    }
	}
    }
  /* Counts changed; recompute the function's frequency class.  */
  if (count_scale != REG_BR_PROB_BASE)
    compute_function_frequency ();

  /* We just processed all calls.  */
  if (cfun->gimple_df)
    vec_free (MODIFIED_NORETURN_CALLS (cfun));

  /* Dump a textual representation of the flowgraph.  */
  if (dump_file)
    gimple_dump_cfg (dump_file, dump_flags);

  /* CFG cleanup may delete blocks belonging to loops; flag the loop
     structures for fixup.  */
  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
8337
namespace {

/* Pass descriptor for the CFG fixup pass (historical name
   "*free_cfg_annotations"); requires a CFG, no gate.  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "*free_cfg_annotations", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper: delegates to execute_fixup_cfg.  Clonable so it can
   appear more than once in the pass pipeline.  */
class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  unsigned int execute () { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace
8369
/* Create a new instance of the CFG fixup pass for context CTXT.
   Caller owns the returned pass.  */
gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}
8375
8376 /* Garbage collection support for edge_def. */
8377
8378 extern void gt_ggc_mx (tree&);
8379 extern void gt_ggc_mx (gimple&);
8380 extern void gt_ggc_mx (rtx&);
8381 extern void gt_ggc_mx (basic_block&);
8382
8383 void
8384 gt_ggc_mx (edge_def *e)
8385 {
8386 tree block = LOCATION_BLOCK (e->goto_locus);
8387 gt_ggc_mx (e->src);
8388 gt_ggc_mx (e->dest);
8389 if (current_ir_type () == IR_GIMPLE)
8390 gt_ggc_mx (e->insns.g);
8391 else
8392 gt_ggc_mx (e->insns.r);
8393 gt_ggc_mx (block);
8394 }
8395
8396 /* PCH support for edge_def. */
8397
8398 extern void gt_pch_nx (tree&);
8399 extern void gt_pch_nx (gimple&);
8400 extern void gt_pch_nx (rtx&);
8401 extern void gt_pch_nx (basic_block&);
8402
8403 void
8404 gt_pch_nx (edge_def *e)
8405 {
8406 tree block = LOCATION_BLOCK (e->goto_locus);
8407 gt_pch_nx (e->src);
8408 gt_pch_nx (e->dest);
8409 if (current_ir_type () == IR_GIMPLE)
8410 gt_pch_nx (e->insns.g);
8411 else
8412 gt_pch_nx (e->insns.r);
8413 gt_pch_nx (block);
8414 }
8415
/* PCH pointer-relocation hook for an edge: apply OP (with COOKIE) to
   each pointer field the edge holds.  */
void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  /* NOTE(review): BLOCK is a local copy extracted from the locus and
     the relocated value is never stored back into e->goto_locus --
     presumably the location table is relocated elsewhere; confirm.  */
  op (&(block), cookie);
}